/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
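// A metering/focus region is reported to the framework as a 5-tuple
// (xmin, ymin, xmax, ymax, weight), which is what REGIONS_TUPLE_COUNT refers to.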
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 10
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
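// Note: METADATA_MAP_SIZE() only works on arrays whose size is known at compile
// time (such as the QCameraMap tables below); it yields the element count, not bytes.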

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

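// These indices address the flattened per-face int32 arrays reported through
// ANDROID_STATISTICS_FACE_RECTANGLES / ANDROID_STATISTICS_FACE_LANDMARKS:
// a face rectangle is packed as (left, top, right, bottom) plus a weight, and the
// landmark array packs (left eye x/y, right eye x/y, mouth x/y).
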
// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

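// Illustrative sketch only (not part of the original HAL source): the first-match,
// lower-to-higher index traversal described above, written out for a generic
// QCameraMap table. It assumes the QCameraMap template exposes fwk_name/hal_name
// members; the HAL's own lookup helpers remain the authoritative implementation.
template <typename fwkType, typename halType>
static bool exampleMapHalToFwk(
        const QCamera3HardwareInterface::QCameraMap<fwkType, halType> *table,
        size_t tableSize, halType halValue, fwkType *fwkValue)
{
    for (size_t i = 0; i < tableSize; i++) {
        if (table[i].hal_name == halValue) {
            *fwkValue = table[i].fwk_name; // first match wins, so table order matters
            return true;
        }
    }
    return false; // no mapping found
}
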
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

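// Example call site (as used later in this file):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// The call is a no-op unless gEaselProfilingEnabled has been set to true.
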
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
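                // For example (hypothetical numbers): a 640x480 PD stat map would
                // require a BLOB depth stream of (640 * 480 * 2) / 16 = 38400 x 1.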
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the full
                 * array size, so keep the check lenient for now.
                 */
1296 }
1297 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1298 MAX_SIZES_CNT);
1299 for (size_t i = 0; i < count; i++) {
1300 if (((int32_t)rotatedWidth ==
1301 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1302 ((int32_t)rotatedHeight ==
1303 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1304 sizeFound = true;
1305 break;
1306 }
1307 }
1308 break;
1309 } /* End of switch(newStream->format) */
1310
1311 /* We error out even if a single stream has unsupported size set */
1312 if (!sizeFound) {
1313 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1314 rotatedWidth, rotatedHeight, newStream->format,
1315 gCamCapability[mCameraId]->active_array_size.width,
1316 gCamCapability[mCameraId]->active_array_size.height);
1317 rc = -EINVAL;
1318 break;
1319 }
1320 } /* End of for each stream */
1321 return rc;
1322}
1323
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001324/*===========================================================================
1325 * FUNCTION : validateUsageFlags
1326 *
1327 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1328 *
1329 * PARAMETERS :
1330 * @stream_list : streams to be configured
1331 *
1332 * RETURN :
1333 * NO_ERROR if the usage flags are supported
1334 * error code if usage flags are not supported
1335 *
1336 *==========================================================================*/
1337int QCamera3HardwareInterface::validateUsageFlags(
1338 const camera3_stream_configuration_t* streamList)
1339{
1340 for (size_t j = 0; j < streamList->num_streams; j++) {
1341 const camera3_stream_t *newStream = streamList->streams[j];
1342
1343 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1344 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1345 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1346 continue;
1347 }
1348
Jason Leec4cf5032017-05-24 18:31:41 -07001349 // Here we only care whether it's EIS3 or not
1350 char is_type_value[PROPERTY_VALUE_MAX];
1351 property_get("persist.camera.is_type", is_type_value, "4");
1352 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1353 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1354 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1355 isType = IS_TYPE_NONE;
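// Note: isType computed here only feeds getStreamDefaultFormat() below; any
// property value other than IS_TYPE_EIS_3_0 is treated as no IS, and front
// camera or constrained high-speed sessions always fall back to IS_TYPE_NONE.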
1356
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001357 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1358 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1359 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1360 bool forcePreviewUBWC = true;
1361 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1362 forcePreviewUBWC = false;
1363 }
1364 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001365 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001366 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001367 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001369 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370
1371 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1372 // So color spaces will always match.
1373
1374 // Check whether underlying formats of shared streams match.
1375 if (isVideo && isPreview && videoFormat != previewFormat) {
1376 LOGE("Combined video and preview usage flag is not supported");
1377 return -EINVAL;
1378 }
1379 if (isPreview && isZSL && previewFormat != zslFormat) {
1380 LOGE("Combined preview and zsl usage flag is not supported");
1381 return -EINVAL;
1382 }
1383 if (isVideo && isZSL && videoFormat != zslFormat) {
1384 LOGE("Combined video and zsl usage flag is not supported");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
1391/*===========================================================================
1392 * FUNCTION : validateUsageFlagsForEis
1393 *
1394 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 * NO_ERROR if the usage flags are supported
1401 * error code if usage flags are not supported
1402 *
1403 *==========================================================================*/
1404int QCamera3HardwareInterface::validateUsageFlagsForEis(
1405 const camera3_stream_configuration_t* streamList)
1406{
1407 for (size_t j = 0; j < streamList->num_streams; j++) {
1408 const camera3_stream_t *newStream = streamList->streams[j];
1409
1410 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1411 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1412
1413 // Because EIS is "hard-coded" for certain use cases, and the current
1414 // implementation doesn't support shared preview and video on the same
1415 // stream, return failure if EIS is forced on.
1416 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1417 LOGE("Combined video and preview usage flag is not supported due to EIS");
1418 return -EINVAL;
1419 }
1420 }
1421 return NO_ERROR;
1422}
1423
Thierry Strudel3d639192016-09-09 11:52:26 -07001424/*==============================================================================
1425 * FUNCTION : isSupportChannelNeeded
1426 *
1427 * DESCRIPTION: Simple heuristic func to determine if a support channel is needed
1428 *
1429 * PARAMETERS :
1430 * @stream_list : streams to be configured
1431 * @stream_config_info : the config info for streams to be configured
1432 *
1433 * RETURN : Boolean true/false decision
1434 *
1435 *==========================================================================*/
1436bool QCamera3HardwareInterface::isSupportChannelNeeded(
1437 camera3_stream_configuration_t *streamList,
1438 cam_stream_size_info_t stream_config_info)
1439{
1440 uint32_t i;
1441 bool pprocRequested = false;
1442 /* Check for conditions where the PProc pipeline does not have any streams */
1443 for (i = 0; i < stream_config_info.num_streams; i++) {
1444 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1445 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1446 pprocRequested = true;
1447 break;
1448 }
1449 }
1450
1451 if (pprocRequested == false )
1452 return true;
1453
1454 /* A dummy stream is needed if only raw or jpeg streams are present */
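// Illustrative configurations that reach this point (assumed, not exhaustive):
// a RAW16-only session or a lone JPEG (BLOB) stream, i.e. no YUV/preview/video
// stream in the list, which is when the dummy support stream is required.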
1455 for (i = 0; i < streamList->num_streams; i++) {
1456 switch(streamList->streams[i]->format) {
1457 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1458 case HAL_PIXEL_FORMAT_RAW10:
1459 case HAL_PIXEL_FORMAT_RAW16:
1460 case HAL_PIXEL_FORMAT_BLOB:
1461 break;
1462 default:
1463 return false;
1464 }
1465 }
1466 return true;
1467}
1468
1469/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 *
1474 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001475 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001476 *
1477 * RETURN : int32_t type of status
1478 * NO_ERROR -- success
1479 * non-zero failure code
1480 *
1481 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001483{
1484 int32_t rc = NO_ERROR;
1485
1486 cam_dimension_t max_dim = {0, 0};
1487 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1488 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1489 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1490 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1491 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1492 }
1493
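// Publish the largest configured stream dimension to the backend first; the
// sensor mode selected for that dimension is then read back below via
// CAM_INTF_PARM_SENSOR_MODE_INFO.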
1494 clear_metadata_buffer(mParameters);
1495
1496 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1497 max_dim);
1498 if (rc != NO_ERROR) {
1499 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1500 return rc;
1501 }
1502
1503 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1504 if (rc != NO_ERROR) {
1505 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1506 return rc;
1507 }
1508
1509 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001510 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511
1512 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1513 mParameters);
1514 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001515 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001516 return rc;
1517 }
1518
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001519 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001520 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1521 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1522 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1523 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1524 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001525
1526 return rc;
1527}
1528
1529/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001530 * FUNCTION : getCurrentSensorModeInfo
1531 *
1532 * DESCRIPTION: Get sensor mode information that is currently selected.
1533 *
1534 * PARAMETERS :
1535 * @sensorModeInfo : sensor mode information (output)
1536 *
1537 * RETURN : int32_t type of status
1538 * NO_ERROR -- success
1539 * non-zero failure code
1540 *
1541 *==========================================================================*/
1542int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1543{
1544 int32_t rc = NO_ERROR;
1545
1546 clear_metadata_buffer(mParameters);
1547 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1548
1549 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1550 mParameters);
1551 if (rc != NO_ERROR) {
1552 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1553 return rc;
1554 }
1555
1556 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1557 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1558 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1559 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1560 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1561 sensorModeInfo.num_raw_bits);
1562
1563 return rc;
1564}
1565
1566/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001567 * FUNCTION : addToPPFeatureMask
1568 *
1569 * DESCRIPTION: add additional features to pp feature mask based on
1570 * stream type and use case
1571 *
1572 * PARAMETERS :
1573 * @stream_format : stream type for feature mask
1574 * @stream_idx : stream idx within postprocess_mask list to change
1575 *
1576 * RETURN : None
1577 *
1578 *==========================================================================*/
1579void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1580 uint32_t stream_idx)
1581{
1582 char feature_mask_value[PROPERTY_VALUE_MAX];
1583 cam_feature_mask_t feature_mask;
1584 int args_converted;
1585 int property_len;
1586
1587 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001588#ifdef _LE_CAMERA_
1589 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1590 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1591 property_len = property_get("persist.camera.hal3.feature",
1592 feature_mask_value, swtnr_feature_mask_value);
1593#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 property_len = property_get("persist.camera.hal3.feature",
1595 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001596#endif
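/* Illustrative property values (assumed, for documentation only):
 *   persist.camera.hal3.feature = 0x4000  -> parsed as hex via "0x%llx"
 *   persist.camera.hal3.feature = 16384   -> parsed as decimal via "%lld"
 * Both forms populate the same feature_mask consumed below. */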
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1598 (feature_mask_value[1] == 'x')) {
1599 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1600 } else {
1601 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1602 }
1603 if (1 != args_converted) {
1604 feature_mask = 0;
1605 LOGE("Wrong feature mask %s", feature_mask_value);
1606 return;
1607 }
1608
1609 switch (stream_format) {
1610 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1611 /* Add LLVD to pp feature mask only if video hint is enabled */
1612 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1613 mStreamConfigInfo.postprocess_mask[stream_idx]
1614 |= CAM_QTI_FEATURE_SW_TNR;
1615 LOGH("Added SW TNR to pp feature mask");
1616 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1617 mStreamConfigInfo.postprocess_mask[stream_idx]
1618 |= CAM_QCOM_FEATURE_LLVD;
1619 LOGH("Added LLVD SeeMore to pp feature mask");
1620 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001621 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1622 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1623 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1624 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001625 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1626 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1627 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1628 CAM_QTI_FEATURE_BINNING_CORRECTION;
1629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 break;
1631 }
1632 default:
1633 break;
1634 }
1635 LOGD("PP feature mask %llx",
1636 mStreamConfigInfo.postprocess_mask[stream_idx]);
1637}
1638
1639/*==============================================================================
1640 * FUNCTION : updateFpsInPreviewBuffer
1641 *
1642 * DESCRIPTION: update FPS information in preview buffer.
1643 *
1644 * PARAMETERS :
1645 * @metadata : pointer to metadata buffer
1646 * @frame_number: frame_number to look for in pending buffer list
1647 *
1648 * RETURN : None
1649 *
1650 *==========================================================================*/
1651void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1652 uint32_t frame_number)
1653{
1654 // Mark all pending buffers for this particular request
1655 // with corresponding framerate information
1656 for (List<PendingBuffersInRequest>::iterator req =
1657 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1658 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1659 for(List<PendingBufferInfo>::iterator j =
1660 req->mPendingBufferList.begin();
1661 j != req->mPendingBufferList.end(); j++) {
1662 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1663 if ((req->frame_number == frame_number) &&
1664 (channel->getStreamTypeMask() &
1665 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1666 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1667 CAM_INTF_PARM_FPS_RANGE, metadata) {
1668 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1669 struct private_handle_t *priv_handle =
1670 (struct private_handle_t *)(*(j->buffer));
1671 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1672 }
1673 }
1674 }
1675 }
1676}
1677
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001678/*==============================================================================
1679 * FUNCTION : updateTimeStampInPendingBuffers
1680 *
1681 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1682 * of a frame number
1683 *
1684 * PARAMETERS :
1685 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1686 * @timestamp : timestamp to be set
1687 *
1688 * RETURN : None
1689 *
1690 *==========================================================================*/
1691void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1692 uint32_t frameNumber, nsecs_t timestamp)
1693{
1694 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1695 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1696 if (req->frame_number != frameNumber)
1697 continue;
1698
1699 for (auto k = req->mPendingBufferList.begin();
1700 k != req->mPendingBufferList.end(); k++ ) {
1701 struct private_handle_t *priv_handle =
1702 (struct private_handle_t *) (*(k->buffer));
1703 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1704 }
1705 }
1706 return;
1707}
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709/*===========================================================================
1710 * FUNCTION : configureStreams
1711 *
1712 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1713 * and output streams.
1714 *
1715 * PARAMETERS :
1716 * @stream_list : streams to be configured
1717 *
1718 * RETURN :
1719 *
1720 *==========================================================================*/
1721int QCamera3HardwareInterface::configureStreams(
1722 camera3_stream_configuration_t *streamList)
1723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001724 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001725 int rc = 0;
1726
1727 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001728 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001729 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001730 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001731
1732 return rc;
1733}
1734
1735/*===========================================================================
1736 * FUNCTION : configureStreamsPerfLocked
1737 *
1738 * DESCRIPTION: configureStreams while perfLock is held.
1739 *
1740 * PARAMETERS :
1741 * @stream_list : streams to be configured
1742 *
1743 * RETURN : int32_t type of status
1744 * NO_ERROR -- success
1745 * none-zero failure code
1746 *==========================================================================*/
1747int QCamera3HardwareInterface::configureStreamsPerfLocked(
1748 camera3_stream_configuration_t *streamList)
1749{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001750 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001751 int rc = 0;
1752
1753 // Sanity check stream_list
1754 if (streamList == NULL) {
1755 LOGE("NULL stream configuration");
1756 return BAD_VALUE;
1757 }
1758 if (streamList->streams == NULL) {
1759 LOGE("NULL stream list");
1760 return BAD_VALUE;
1761 }
1762
1763 if (streamList->num_streams < 1) {
1764 LOGE("Bad number of streams requested: %d",
1765 streamList->num_streams);
1766 return BAD_VALUE;
1767 }
1768
1769 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1770 LOGE("Maximum number of streams %d exceeded: %d",
1771 MAX_NUM_STREAMS, streamList->num_streams);
1772 return BAD_VALUE;
1773 }
1774
Jason Leec4cf5032017-05-24 18:31:41 -07001775 mOpMode = streamList->operation_mode;
1776 LOGD("mOpMode: %d", mOpMode);
1777
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 rc = validateUsageFlags(streamList);
1779 if (rc != NO_ERROR) {
1780 return rc;
1781 }
1782
Thierry Strudel3d639192016-09-09 11:52:26 -07001783 /* First invalidate all the streams in mStreamInfo;
1784 * if they appear again, they will be validated */
1785 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1786 it != mStreamInfo.end(); it++) {
1787 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1788 if (channel) {
1789 channel->stop();
1790 }
1791 (*it)->status = INVALID;
1792 }
1793
1794 if (mRawDumpChannel) {
1795 mRawDumpChannel->stop();
1796 delete mRawDumpChannel;
1797 mRawDumpChannel = NULL;
1798 }
1799
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001800 if (mHdrPlusRawSrcChannel) {
1801 mHdrPlusRawSrcChannel->stop();
1802 delete mHdrPlusRawSrcChannel;
1803 mHdrPlusRawSrcChannel = NULL;
1804 }
1805
Thierry Strudel3d639192016-09-09 11:52:26 -07001806 if (mSupportChannel)
1807 mSupportChannel->stop();
1808
1809 if (mAnalysisChannel) {
1810 mAnalysisChannel->stop();
1811 }
1812 if (mMetadataChannel) {
1813 /* If mStreamInfo is not empty, there is a metadata stream */
1814 mMetadataChannel->stop();
1815 }
1816 if (mChannelHandle) {
1817 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001818 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001819 LOGD("stopping channel %d", mChannelHandle);
1820 }
1821
1822 pthread_mutex_lock(&mMutex);
1823
1824 // Check state
1825 switch (mState) {
1826 case INITIALIZED:
1827 case CONFIGURED:
1828 case STARTED:
1829 /* valid state */
1830 break;
1831 default:
1832 LOGE("Invalid state %d", mState);
1833 pthread_mutex_unlock(&mMutex);
1834 return -ENODEV;
1835 }
1836
1837 /* Check whether we have video stream */
1838 m_bIs4KVideo = false;
1839 m_bIsVideo = false;
1840 m_bEisSupportedSize = false;
1841 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001842 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001843 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001844 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001845 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 uint32_t videoWidth = 0U;
1847 uint32_t videoHeight = 0U;
1848 size_t rawStreamCnt = 0;
1849 size_t stallStreamCnt = 0;
1850 size_t processedStreamCnt = 0;
1851 // Number of streams on ISP encoder path
1852 size_t numStreamsOnEncoder = 0;
1853 size_t numYuv888OnEncoder = 0;
1854 bool bYuv888OverrideJpeg = false;
1855 cam_dimension_t largeYuv888Size = {0, 0};
1856 cam_dimension_t maxViewfinderSize = {0, 0};
1857 bool bJpegExceeds4K = false;
1858 bool bJpegOnEncoder = false;
1859 bool bUseCommonFeatureMask = false;
1860 cam_feature_mask_t commonFeatureMask = 0;
1861 bool bSmallJpegSize = false;
1862 uint32_t width_ratio;
1863 uint32_t height_ratio;
1864 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1865 camera3_stream_t *inputStream = NULL;
1866 bool isJpeg = false;
1867 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001868 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001869 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001870
1871 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1872
1873 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 uint8_t eis_prop_set;
1875 uint32_t maxEisWidth = 0;
1876 uint32_t maxEisHeight = 0;
1877
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001878 // Initialize all instant AEC related variables
1879 mInstantAEC = false;
1880 mResetInstantAEC = false;
1881 mInstantAECSettledFrameNumber = 0;
1882 mAecSkipDisplayFrameBound = 0;
1883 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001884 mCurrFeatureState = 0;
1885 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001886
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1888
1889 size_t count = IS_TYPE_MAX;
1890 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1891 for (size_t i = 0; i < count; i++) {
1892 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001893 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1894 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 break;
1896 }
1897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001898
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001899 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 maxEisWidth = MAX_EIS_WIDTH;
1901 maxEisHeight = MAX_EIS_HEIGHT;
1902 }
1903
1904 /* EIS setprop control */
1905 char eis_prop[PROPERTY_VALUE_MAX];
1906 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001907 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 eis_prop_set = (uint8_t)atoi(eis_prop);
1909
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001910 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001911 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
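// EIS is enabled only when the property allows it, the sensor reports an
// EIS-capable IS type, and this is not a constrained high-speed session; it is
// additionally disabled later for front cameras and non-video configurations.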
1912
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001913 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1914 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001915
Thierry Strudel3d639192016-09-09 11:52:26 -07001916 /* stream configurations */
1917 for (size_t i = 0; i < streamList->num_streams; i++) {
1918 camera3_stream_t *newStream = streamList->streams[i];
1919 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1920 "height = %d, rotation = %d, usage = 0x%x",
1921 i, newStream->stream_type, newStream->format,
1922 newStream->width, newStream->height, newStream->rotation,
1923 newStream->usage);
1924 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1925 newStream->stream_type == CAMERA3_STREAM_INPUT){
1926 isZsl = true;
1927 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001928 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1929 IS_USAGE_PREVIEW(newStream->usage)) {
1930 isPreview = true;
1931 }
1932
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1934 inputStream = newStream;
1935 }
1936
Emilian Peev7650c122017-01-19 08:24:33 -08001937 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1938 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001939 isJpeg = true;
1940 jpegSize.width = newStream->width;
1941 jpegSize.height = newStream->height;
1942 if (newStream->width > VIDEO_4K_WIDTH ||
1943 newStream->height > VIDEO_4K_HEIGHT)
1944 bJpegExceeds4K = true;
1945 }
1946
1947 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1948 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1949 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001950 // In HAL3 we can have multiple different video streams.
1951 // The variables video width and height are used below as
1952 // dimensions of the biggest of them
1953 if (videoWidth < newStream->width ||
1954 videoHeight < newStream->height) {
1955 videoWidth = newStream->width;
1956 videoHeight = newStream->height;
1957 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001958 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1959 (VIDEO_4K_HEIGHT <= newStream->height)) {
1960 m_bIs4KVideo = true;
1961 }
1962 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1963 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001964
Thierry Strudel3d639192016-09-09 11:52:26 -07001965 }
1966 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1967 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1968 switch (newStream->format) {
1969 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001970 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1971 depthPresent = true;
1972 break;
1973 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001974 stallStreamCnt++;
1975 if (isOnEncoder(maxViewfinderSize, newStream->width,
1976 newStream->height)) {
1977 numStreamsOnEncoder++;
1978 bJpegOnEncoder = true;
1979 }
1980 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1981 newStream->width);
1982 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1983 newStream->height);
1984 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1985 "FATAL: max_downscale_factor cannot be zero and so assert");
1986 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1987 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1988 LOGH("Setting small jpeg size flag to true");
1989 bSmallJpegSize = true;
1990 }
1991 break;
1992 case HAL_PIXEL_FORMAT_RAW10:
1993 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1994 case HAL_PIXEL_FORMAT_RAW16:
1995 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001996 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1997 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1998 pdStatCount++;
1999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002000 break;
2001 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2002 processedStreamCnt++;
2003 if (isOnEncoder(maxViewfinderSize, newStream->width,
2004 newStream->height)) {
2005 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2006 !IS_USAGE_ZSL(newStream->usage)) {
2007 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2008 }
2009 numStreamsOnEncoder++;
2010 }
2011 break;
2012 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2013 processedStreamCnt++;
2014 if (isOnEncoder(maxViewfinderSize, newStream->width,
2015 newStream->height)) {
2016 // If Yuv888 size is not greater than 4K, set feature mask
2017 // to SUPERSET so that it supports concurrent requests on
2018 // YUV and JPEG.
2019 if (newStream->width <= VIDEO_4K_WIDTH &&
2020 newStream->height <= VIDEO_4K_HEIGHT) {
2021 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2022 }
2023 numStreamsOnEncoder++;
2024 numYuv888OnEncoder++;
2025 largeYuv888Size.width = newStream->width;
2026 largeYuv888Size.height = newStream->height;
2027 }
2028 break;
2029 default:
2030 processedStreamCnt++;
2031 if (isOnEncoder(maxViewfinderSize, newStream->width,
2032 newStream->height)) {
2033 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 numStreamsOnEncoder++;
2035 }
2036 break;
2037 }
2038
2039 }
2040 }
2041
2042 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2043 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2044 !m_bIsVideo) {
2045 m_bEisEnable = false;
2046 }
2047
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002048 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2049 pthread_mutex_unlock(&mMutex);
2050 return -EINVAL;
2051 }
2052
Thierry Strudel54dc9782017-02-15 12:12:10 -08002053 uint8_t forceEnableTnr = 0;
2054 char tnr_prop[PROPERTY_VALUE_MAX];
2055 memset(tnr_prop, 0, sizeof(tnr_prop));
2056 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2057 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2058
Thierry Strudel3d639192016-09-09 11:52:26 -07002059 /* Logic to enable/disable TNR based on specific config size/etc.*/
2060 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2062 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002063 else if (forceEnableTnr)
2064 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002065
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002066 char videoHdrProp[PROPERTY_VALUE_MAX];
2067 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2068 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2069 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2070
2071 if (hdr_mode_prop == 1 && m_bIsVideo &&
2072 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2073 m_bVideoHdrEnabled = true;
2074 else
2075 m_bVideoHdrEnabled = false;
2076
2077
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 /* Check if num_streams is sane */
2079 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2080 rawStreamCnt > MAX_RAW_STREAMS ||
2081 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2082 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2083 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2084 pthread_mutex_unlock(&mMutex);
2085 return -EINVAL;
2086 }
2087 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002088 if (isZsl && m_bIs4KVideo) {
2089 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 pthread_mutex_unlock(&mMutex);
2091 return -EINVAL;
2092 }
2093 /* Check if stream sizes are sane */
2094 if (numStreamsOnEncoder > 2) {
2095 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2096 pthread_mutex_unlock(&mMutex);
2097 return -EINVAL;
2098 } else if (1 < numStreamsOnEncoder){
2099 bUseCommonFeatureMask = true;
2100 LOGH("Multiple streams above max viewfinder size, common mask needed");
2101 }
2102
2103 /* Check if BLOB size is greater than 4k in 4k recording case */
2104 if (m_bIs4KVideo && bJpegExceeds4K) {
2105 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2106 pthread_mutex_unlock(&mMutex);
2107 return -EINVAL;
2108 }
2109
Emilian Peev7650c122017-01-19 08:24:33 -08002110 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2111 depthPresent) {
2112 LOGE("HAL doesn't support depth streams in HFR mode!");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 }
2116
Thierry Strudel3d639192016-09-09 11:52:26 -07002117 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2118 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2119 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2120 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2121 // configurations:
2122 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2123 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2124 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2125 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2126 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2127 __func__);
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131
2132 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2133 // the YUV stream's size is greater or equal to the JPEG size, set common
2134 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2135 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2136 jpegSize.width, jpegSize.height) &&
2137 largeYuv888Size.width > jpegSize.width &&
2138 largeYuv888Size.height > jpegSize.height) {
2139 bYuv888OverrideJpeg = true;
2140 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2141 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2142 }
2143
2144 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2145 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2146 commonFeatureMask);
2147 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2148 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2149
2150 rc = validateStreamDimensions(streamList);
2151 if (rc == NO_ERROR) {
2152 rc = validateStreamRotations(streamList);
2153 }
2154 if (rc != NO_ERROR) {
2155 LOGE("Invalid stream configuration requested!");
2156 pthread_mutex_unlock(&mMutex);
2157 return rc;
2158 }
2159
Emilian Peev0f3c3162017-03-15 12:57:46 +00002160 if (1 < pdStatCount) {
2161 LOGE("HAL doesn't support multiple PD streams");
2162 pthread_mutex_unlock(&mMutex);
2163 return -EINVAL;
2164 }
2165
2166 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2167 (1 == pdStatCount)) {
2168 LOGE("HAL doesn't support PD streams in HFR mode!");
2169 pthread_mutex_unlock(&mMutex);
2170 return -EINVAL;
2171 }
2172
Thierry Strudel3d639192016-09-09 11:52:26 -07002173 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2174 for (size_t i = 0; i < streamList->num_streams; i++) {
2175 camera3_stream_t *newStream = streamList->streams[i];
2176 LOGH("newStream type = %d, stream format = %d "
2177 "stream size : %d x %d, stream rotation = %d",
2178 newStream->stream_type, newStream->format,
2179 newStream->width, newStream->height, newStream->rotation);
2180 //if the stream is already in mStreamInfo, validate it
2181 bool stream_exists = false;
2182 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2183 it != mStreamInfo.end(); it++) {
2184 if ((*it)->stream == newStream) {
2185 QCamera3ProcessingChannel *channel =
2186 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2187 stream_exists = true;
2188 if (channel)
2189 delete channel;
2190 (*it)->status = VALID;
2191 (*it)->stream->priv = NULL;
2192 (*it)->channel = NULL;
2193 }
2194 }
2195 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2196 //new stream
2197 stream_info_t* stream_info;
2198 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2199 if (!stream_info) {
2200 LOGE("Could not allocate stream info");
2201 rc = -ENOMEM;
2202 pthread_mutex_unlock(&mMutex);
2203 return rc;
2204 }
2205 stream_info->stream = newStream;
2206 stream_info->status = VALID;
2207 stream_info->channel = NULL;
2208 mStreamInfo.push_back(stream_info);
2209 }
2210 /* Covers Opaque ZSL and API1 F/W ZSL */
2211 if (IS_USAGE_ZSL(newStream->usage)
2212 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2213 if (zslStream != NULL) {
2214 LOGE("Multiple input/reprocess streams requested!");
2215 pthread_mutex_unlock(&mMutex);
2216 return BAD_VALUE;
2217 }
2218 zslStream = newStream;
2219 }
2220 /* Covers YUV reprocess */
2221 if (inputStream != NULL) {
2222 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2223 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2225 && inputStream->width == newStream->width
2226 && inputStream->height == newStream->height) {
2227 if (zslStream != NULL) {
2228 /* This scenario indicates that multiple YUV streams with the same
2229 * size as the input stream have been requested. Since the zsl
2230 * stream handle is solely used to override the size of streams
2231 * that share h/w streams, we just make a guess here as to which
2232 * stream is the ZSL stream. This will be refactored once generic
2233 * logic for streams sharing encoder output is in place.
2234 */
2235 LOGH("Warning, Multiple ip/reprocess streams requested!");
2236 }
2237 zslStream = newStream;
2238 }
2239 }
2240 }
2241
2242 /* If a zsl stream is set, we know that we have configured at least one input or
2243 bidirectional stream */
2244 if (NULL != zslStream) {
2245 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2246 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2247 mInputStreamInfo.format = zslStream->format;
2248 mInputStreamInfo.usage = zslStream->usage;
2249 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2250 mInputStreamInfo.dim.width,
2251 mInputStreamInfo.dim.height,
2252 mInputStreamInfo.format, mInputStreamInfo.usage);
2253 }
2254
2255 cleanAndSortStreamInfo();
2256 if (mMetadataChannel) {
2257 delete mMetadataChannel;
2258 mMetadataChannel = NULL;
2259 }
2260 if (mSupportChannel) {
2261 delete mSupportChannel;
2262 mSupportChannel = NULL;
2263 }
2264
2265 if (mAnalysisChannel) {
2266 delete mAnalysisChannel;
2267 mAnalysisChannel = NULL;
2268 }
2269
2270 if (mDummyBatchChannel) {
2271 delete mDummyBatchChannel;
2272 mDummyBatchChannel = NULL;
2273 }
2274
Emilian Peev7650c122017-01-19 08:24:33 -08002275 if (mDepthChannel) {
2276 mDepthChannel = NULL;
2277 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002278 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002279
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002280 mShutterDispatcher.clear();
2281 mOutputBufferDispatcher.clear();
2282
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 char is_type_value[PROPERTY_VALUE_MAX];
2284 property_get("persist.camera.is_type", is_type_value, "4");
2285 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2286
Binhao Line406f062017-05-03 14:39:44 -07002287 char property_value[PROPERTY_VALUE_MAX];
2288 property_get("persist.camera.gzoom.at", property_value, "0");
2289 int goog_zoom_at = atoi(property_value);
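// persist.camera.gzoom.at is treated as a bitmask: bit 0 enables Google zoom
// on the video stream, bit 1 on preview stream(s); e.g. a value of 3
// (illustrative) enables both. Both paths also require the back camera.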
Jason Leec4cf5032017-05-24 18:31:41 -07002290 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2291 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2292 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2293 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002294
2295 property_get("persist.camera.gzoom.4k", property_value, "0");
2296 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2297
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 //Create metadata channel and initialize it
2299 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2300 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2301 gCamCapability[mCameraId]->color_arrangement);
2302 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2303 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002304 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 if (mMetadataChannel == NULL) {
2306 LOGE("failed to allocate metadata channel");
2307 rc = -ENOMEM;
2308 pthread_mutex_unlock(&mMutex);
2309 return rc;
2310 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002311 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002312 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2313 if (rc < 0) {
2314 LOGE("metadata channel initialization failed");
2315 delete mMetadataChannel;
2316 mMetadataChannel = NULL;
2317 pthread_mutex_unlock(&mMutex);
2318 return rc;
2319 }
2320
Thierry Strudel2896d122017-02-23 19:18:03 -08002321 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002323 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002324 // Keep track of preview/video stream indices.
2325 // There could be more than one preview stream, but only one video stream.
2326 int32_t video_stream_idx = -1;
2327 int32_t preview_stream_idx[streamList->num_streams];
2328 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002329 bool previewTnr[streamList->num_streams];
2330 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2331 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2332 // Loop through once to determine preview TNR conditions before creating channels.
2333 for (size_t i = 0; i < streamList->num_streams; i++) {
2334 camera3_stream_t *newStream = streamList->streams[i];
2335 uint32_t stream_usage = newStream->usage;
2336 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2337 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2338 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2339 video_stream_idx = (int32_t)i;
2340 else
2341 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2342 }
2343 }
2344 // By default, preview stream TNR is disabled.
2345 // Enable TNR to the preview stream if all conditions below are satisfied:
2346 // 1. preview resolution == video resolution.
2347 // 2. video stream TNR is enabled.
2348 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
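// Illustrative example (assumed sizes): a 1920x1080 preview shared with a
// 1920x1080 video stream, with video TNR enabled and EIS 2.0 selected (or the
// front camera), satisfies all three conditions and marks that preview in
// previewTnr[].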
2349 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2350 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2351 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2352 if (m_bTnrEnabled && m_bTnrVideo &&
2353 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2354 video_stream->width == preview_stream->width &&
2355 video_stream->height == preview_stream->height) {
2356 previewTnr[preview_stream_idx[i]] = true;
2357 }
2358 }
2359
Thierry Strudel3d639192016-09-09 11:52:26 -07002360 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2361 /* Allocate channel objects for the requested streams */
2362 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002363
Thierry Strudel3d639192016-09-09 11:52:26 -07002364 camera3_stream_t *newStream = streamList->streams[i];
2365 uint32_t stream_usage = newStream->usage;
2366 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2367 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2368 struct camera_info *p_info = NULL;
2369 pthread_mutex_lock(&gCamLock);
2370 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2371 pthread_mutex_unlock(&gCamLock);
2372 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2373 || IS_USAGE_ZSL(newStream->usage)) &&
2374 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002375 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002376 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002377 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2378 if (bUseCommonFeatureMask)
2379 zsl_ppmask = commonFeatureMask;
2380 else
2381 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002382 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002383 if (numStreamsOnEncoder > 0)
2384 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2385 else
2386 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 LOGH("Input stream configured, reprocess config");
2392 } else {
2393 //for non zsl streams find out the format
2394 switch (newStream->format) {
2395 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2396 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002397 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2399 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2400 /* add additional features to pp feature mask */
2401 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2402 mStreamConfigInfo.num_streams);
2403
2404 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2405 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2406 CAM_STREAM_TYPE_VIDEO;
2407 if (m_bTnrEnabled && m_bTnrVideo) {
2408 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2409 CAM_QCOM_FEATURE_CPP_TNR;
2410 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2412 ~CAM_QCOM_FEATURE_CDS;
2413 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2416 CAM_QTI_FEATURE_PPEISCORE;
2417 }
Binhao Line406f062017-05-03 14:39:44 -07002418 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2419 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2420 CAM_QCOM_FEATURE_GOOG_ZOOM;
2421 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002422 } else {
2423 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2424 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002425 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2427 CAM_QCOM_FEATURE_CPP_TNR;
2428 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2430 ~CAM_QCOM_FEATURE_CDS;
2431 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002432 if(!m_bSwTnrPreview) {
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2434 ~CAM_QTI_FEATURE_SW_TNR;
2435 }
Binhao Line406f062017-05-03 14:39:44 -07002436 if (is_goog_zoom_preview_enabled) {
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2438 CAM_QCOM_FEATURE_GOOG_ZOOM;
2439 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002440 padding_info.width_padding = mSurfaceStridePadding;
2441 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002442 previewSize.width = (int32_t)newStream->width;
2443 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002444 }
2445 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2446 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2447 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2448 newStream->height;
2449 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2450 newStream->width;
2451 }
2452 }
2453 break;
2454 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002455 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002456 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2457 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2458 if (bUseCommonFeatureMask)
2459 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2460 commonFeatureMask;
2461 else
2462 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2463 CAM_QCOM_FEATURE_NONE;
2464 } else {
2465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2466 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2467 }
2468 break;
2469 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002470 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2472 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2473 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2474 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2475 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002476 /* Remove rotation if it is not supported
2477 for 4K LiveVideo snapshot case (online processing) */
2478 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2479 CAM_QCOM_FEATURE_ROTATION)) {
2480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2481 &= ~CAM_QCOM_FEATURE_ROTATION;
2482 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 } else {
2484 if (bUseCommonFeatureMask &&
2485 isOnEncoder(maxViewfinderSize, newStream->width,
2486 newStream->height)) {
2487 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2488 } else {
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2490 }
2491 }
2492 if (isZsl) {
2493 if (zslStream) {
2494 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2495 (int32_t)zslStream->width;
2496 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2497 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 } else {
2501 LOGE("Error, No ZSL stream identified");
2502 pthread_mutex_unlock(&mMutex);
2503 return -EINVAL;
2504 }
2505 } else if (m_bIs4KVideo) {
2506 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2507 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2508 } else if (bYuv888OverrideJpeg) {
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2510 (int32_t)largeYuv888Size.width;
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2512 (int32_t)largeYuv888Size.height;
2513 }
2514 break;
2515 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2516 case HAL_PIXEL_FORMAT_RAW16:
2517 case HAL_PIXEL_FORMAT_RAW10:
2518 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2520 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002521 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2522 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2523 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2524 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2525 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2526 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2527 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2528 gCamCapability[mCameraId]->dt[mPDIndex];
2529 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2530 gCamCapability[mCameraId]->vc[mPDIndex];
2531 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002532 break;
2533 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002534 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002535 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2536 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2537 break;
2538 }
2539 }
2540
2541 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2542 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2543 gCamCapability[mCameraId]->color_arrangement);
2544
2545 if (newStream->priv == NULL) {
2546 //New stream, construct channel
2547 switch (newStream->stream_type) {
2548 case CAMERA3_STREAM_INPUT:
2549 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2550 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2551 break;
2552 case CAMERA3_STREAM_BIDIRECTIONAL:
2553 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2554 GRALLOC_USAGE_HW_CAMERA_WRITE;
2555 break;
2556 case CAMERA3_STREAM_OUTPUT:
2557 /* For video encoding streams, set the read/write rarely
2558 * flags so that the buffers may be allocated un-cached */
2559 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2560 newStream->usage |=
2561 (GRALLOC_USAGE_SW_READ_RARELY |
2562 GRALLOC_USAGE_SW_WRITE_RARELY |
2563 GRALLOC_USAGE_HW_CAMERA_WRITE);
2564 else if (IS_USAGE_ZSL(newStream->usage))
2565 {
2566 LOGD("ZSL usage flag skipping");
2567 }
2568 else if (newStream == zslStream
2569 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2570 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2571 } else
2572 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2573 break;
2574 default:
2575 LOGE("Invalid stream_type %d", newStream->stream_type);
2576 break;
2577 }
2578
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002579 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002580 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2581 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2582 QCamera3ProcessingChannel *channel = NULL;
2583 switch (newStream->format) {
2584 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2585 if ((newStream->usage &
2586 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2587 (streamList->operation_mode ==
2588 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2589 ) {
2590 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2591 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002592 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 this,
2594 newStream,
2595 (cam_stream_type_t)
2596 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2597 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2598 mMetadataChannel,
2599 0); //heap buffers are not required for HFR video channel
2600 if (channel == NULL) {
2601 LOGE("allocation of channel failed");
2602 pthread_mutex_unlock(&mMutex);
2603 return -ENOMEM;
2604 }
2605 //channel->getNumBuffers() will return 0 here so use
2606 //MAX_INFLIGHT_HFR_REQUESTS
2607 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2608 newStream->priv = channel;
2609 LOGI("num video buffers in HFR mode: %d",
2610 MAX_INFLIGHT_HFR_REQUESTS);
2611 } else {
2612 /* Copy stream contents in HFR preview only case to create
2613 * dummy batch channel so that sensor streaming is in
2614 * HFR mode */
2615 if (!m_bIsVideo && (streamList->operation_mode ==
2616 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2617 mDummyBatchStream = *newStream;
2618 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002619 int bufferCount = MAX_INFLIGHT_REQUESTS;
2620 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2621 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002622 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2623 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2624 bufferCount = m_bIs4KVideo ?
2625 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2626 }
2627
Thierry Strudel2896d122017-02-23 19:18:03 -08002628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002638 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002644 /* disable UBWC for preview, though supported,
2645 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002646 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002647 (previewSize.width == (int32_t)videoWidth)&&
2648 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002649 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002650 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002651 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002652 /* When goog_zoom is linked to the preview or video stream,
2653                  * disable UBWC for the linked stream */
2654 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2655 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2656 channel->setUBWCEnabled(false);
2657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002658 newStream->max_buffers = channel->getNumBuffers();
2659 newStream->priv = channel;
2660 }
2661 break;
2662 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2663 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2664 mChannelHandle,
2665 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002666 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002667 this,
2668 newStream,
2669 (cam_stream_type_t)
2670 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2671 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2672 mMetadataChannel);
2673 if (channel == NULL) {
2674 LOGE("allocation of YUV channel failed");
2675 pthread_mutex_unlock(&mMutex);
2676 return -ENOMEM;
2677 }
2678 newStream->max_buffers = channel->getNumBuffers();
2679 newStream->priv = channel;
2680 break;
2681 }
2682 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2683 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002684 case HAL_PIXEL_FORMAT_RAW10: {
2685 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2686 (HAL_DATASPACE_DEPTH != newStream->data_space))
2687 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002688 mRawChannel = new QCamera3RawChannel(
2689 mCameraHandle->camera_handle, mChannelHandle,
2690 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002691 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002692 this, newStream,
2693 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002694 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002695 if (mRawChannel == NULL) {
2696 LOGE("allocation of raw channel failed");
2697 pthread_mutex_unlock(&mMutex);
2698 return -ENOMEM;
2699 }
2700 newStream->max_buffers = mRawChannel->getNumBuffers();
2701 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2702 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002703 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002704 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002705 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2706 mDepthChannel = new QCamera3DepthChannel(
2707 mCameraHandle->camera_handle, mChannelHandle,
2708 mCameraHandle->ops, NULL, NULL, &padding_info,
2709 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2710 mMetadataChannel);
2711 if (NULL == mDepthChannel) {
2712 LOGE("Allocation of depth channel failed");
2713 pthread_mutex_unlock(&mMutex);
2714 return NO_MEMORY;
2715 }
2716 newStream->priv = mDepthChannel;
2717 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2718 } else {
2719 // Max live snapshot inflight buffer is 1. This is to mitigate
2720 // frame drop issues for video snapshot. The more buffers being
2721 // allocated, the more frame drops there are.
2722 mPictureChannel = new QCamera3PicChannel(
2723 mCameraHandle->camera_handle, mChannelHandle,
2724 mCameraHandle->ops, captureResultCb,
2725 setBufferErrorStatus, &padding_info, this, newStream,
2726 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2727 m_bIs4KVideo, isZsl, mMetadataChannel,
2728 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2729 if (mPictureChannel == NULL) {
2730 LOGE("allocation of channel failed");
2731 pthread_mutex_unlock(&mMutex);
2732 return -ENOMEM;
2733 }
2734 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2735 newStream->max_buffers = mPictureChannel->getNumBuffers();
2736 mPictureChannel->overrideYuvSize(
2737 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2738 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002739 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002740 break;
2741
2742 default:
2743 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002744 pthread_mutex_unlock(&mMutex);
2745 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002746 }
2747 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2748 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2749 } else {
2750 LOGE("Error, Unknown stream type");
2751 pthread_mutex_unlock(&mMutex);
2752 return -EINVAL;
2753 }
2754
2755 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002756 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002757 // Here we only care whether it's EIS3 or not
2758 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2759 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2760 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2761 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002762 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002763 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002764 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002765 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2766 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2767 }
2768 }
2769
2770 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2771 it != mStreamInfo.end(); it++) {
2772 if ((*it)->stream == newStream) {
2773 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2774 break;
2775 }
2776 }
2777 } else {
2778 // Channel already exists for this stream
2779 // Do nothing for now
2780 }
2781 padding_info = gCamCapability[mCameraId]->padding_info;
2782
Emilian Peev7650c122017-01-19 08:24:33 -08002783        /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002784         * since there is no real stream associated with them
2785 */
Emilian Peev7650c122017-01-19 08:24:33 -08002786 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002787 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2788 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002789 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002790 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 }
2792
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002793 // Let buffer dispatcher know the configured streams.
2794 mOutputBufferDispatcher.configureStreams(streamList);
2795
Thierry Strudel2896d122017-02-23 19:18:03 -08002796 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2797 onlyRaw = false;
2798 }
2799
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002800 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002801 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002802 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002803 cam_analysis_info_t analysisInfo;
2804 int32_t ret = NO_ERROR;
2805 ret = mCommon.getAnalysisInfo(
2806 FALSE,
2807 analysisFeatureMask,
2808 &analysisInfo);
2809 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002810 cam_color_filter_arrangement_t analysis_color_arrangement =
2811 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2812 CAM_FILTER_ARRANGEMENT_Y :
2813 gCamCapability[mCameraId]->color_arrangement);
2814 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2815 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002816 cam_dimension_t analysisDim;
2817 analysisDim = mCommon.getMatchingDimension(previewSize,
2818 analysisInfo.analysis_recommended_res);
2819
2820 mAnalysisChannel = new QCamera3SupportChannel(
2821 mCameraHandle->camera_handle,
2822 mChannelHandle,
2823 mCameraHandle->ops,
2824 &analysisInfo.analysis_padding_info,
2825 analysisFeatureMask,
2826 CAM_STREAM_TYPE_ANALYSIS,
2827 &analysisDim,
2828 (analysisInfo.analysis_format
2829 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2830 : CAM_FORMAT_YUV_420_NV21),
2831 analysisInfo.hw_analysis_supported,
2832 gCamCapability[mCameraId]->color_arrangement,
2833 this,
2834 0); // force buffer count to 0
2835 } else {
2836 LOGW("getAnalysisInfo failed, ret = %d", ret);
2837 }
2838 if (!mAnalysisChannel) {
2839 LOGW("Analysis channel cannot be created");
2840 }
2841 }
2842
Thierry Strudel3d639192016-09-09 11:52:26 -07002843 //RAW DUMP channel
2844 if (mEnableRawDump && isRawStreamRequested == false){
2845 cam_dimension_t rawDumpSize;
2846 rawDumpSize = getMaxRawSize(mCameraId);
2847 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2848 setPAAFSupport(rawDumpFeatureMask,
2849 CAM_STREAM_TYPE_RAW,
2850 gCamCapability[mCameraId]->color_arrangement);
2851 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2852 mChannelHandle,
2853 mCameraHandle->ops,
2854 rawDumpSize,
2855 &padding_info,
2856 this, rawDumpFeatureMask);
2857 if (!mRawDumpChannel) {
2858 LOGE("Raw Dump channel cannot be created");
2859 pthread_mutex_unlock(&mMutex);
2860 return -ENOMEM;
2861 }
2862 }
2863
Thierry Strudel3d639192016-09-09 11:52:26 -07002864 if (mAnalysisChannel) {
2865 cam_analysis_info_t analysisInfo;
2866 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2867 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2868 CAM_STREAM_TYPE_ANALYSIS;
2869 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2870 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002871 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002872 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2873 &analysisInfo);
2874 if (rc != NO_ERROR) {
2875 LOGE("getAnalysisInfo failed, ret = %d", rc);
2876 pthread_mutex_unlock(&mMutex);
2877 return rc;
2878 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002879 cam_color_filter_arrangement_t analysis_color_arrangement =
2880 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2881 CAM_FILTER_ARRANGEMENT_Y :
2882 gCamCapability[mCameraId]->color_arrangement);
2883 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2884 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2885 analysis_color_arrangement);
2886
Thierry Strudel3d639192016-09-09 11:52:26 -07002887 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002888 mCommon.getMatchingDimension(previewSize,
2889 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002890 mStreamConfigInfo.num_streams++;
2891 }
2892
Thierry Strudel2896d122017-02-23 19:18:03 -08002893 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002894 cam_analysis_info_t supportInfo;
2895 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2896 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2897 setPAAFSupport(callbackFeatureMask,
2898 CAM_STREAM_TYPE_CALLBACK,
2899 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002900 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002901 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002902 if (ret != NO_ERROR) {
2903 /* Ignore the error for Mono camera
2904 * because the PAAF bit mask is only set
2905 * for CAM_STREAM_TYPE_ANALYSIS stream type
2906 */
2907 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2908 LOGW("getAnalysisInfo failed, ret = %d", ret);
2909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002910 }
2911 mSupportChannel = new QCamera3SupportChannel(
2912 mCameraHandle->camera_handle,
2913 mChannelHandle,
2914 mCameraHandle->ops,
2915 &gCamCapability[mCameraId]->padding_info,
2916 callbackFeatureMask,
2917 CAM_STREAM_TYPE_CALLBACK,
2918 &QCamera3SupportChannel::kDim,
2919 CAM_FORMAT_YUV_420_NV21,
2920 supportInfo.hw_analysis_supported,
2921 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002922 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002923 if (!mSupportChannel) {
2924 LOGE("dummy channel cannot be created");
2925 pthread_mutex_unlock(&mMutex);
2926 return -ENOMEM;
2927 }
2928 }
2929
2930 if (mSupportChannel) {
2931 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2932 QCamera3SupportChannel::kDim;
2933 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2934 CAM_STREAM_TYPE_CALLBACK;
2935 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2936 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2937 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2938 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2939 gCamCapability[mCameraId]->color_arrangement);
2940 mStreamConfigInfo.num_streams++;
2941 }
2942
2943 if (mRawDumpChannel) {
2944 cam_dimension_t rawSize;
2945 rawSize = getMaxRawSize(mCameraId);
2946 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2947 rawSize;
2948 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2949 CAM_STREAM_TYPE_RAW;
2950 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2951 CAM_QCOM_FEATURE_NONE;
2952 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2953 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2954 gCamCapability[mCameraId]->color_arrangement);
2955 mStreamConfigInfo.num_streams++;
2956 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002957
2958 if (mHdrPlusRawSrcChannel) {
2959 cam_dimension_t rawSize;
2960 rawSize = getMaxRawSize(mCameraId);
2961 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2962 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2963 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2964 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2965 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2966 gCamCapability[mCameraId]->color_arrangement);
2967 mStreamConfigInfo.num_streams++;
2968 }
2969
Thierry Strudel3d639192016-09-09 11:52:26 -07002970 /* In HFR mode, if video stream is not added, create a dummy channel so that
2971     * ISP can create a batch mode even for the preview-only case. This channel is
2972 * never 'start'ed (no stream-on), it is only 'initialized' */
2973 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2974 !m_bIsVideo) {
2975 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(dummyFeatureMask,
2977 CAM_STREAM_TYPE_VIDEO,
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2980 mChannelHandle,
2981 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002982 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002983 this,
2984 &mDummyBatchStream,
2985 CAM_STREAM_TYPE_VIDEO,
2986 dummyFeatureMask,
2987 mMetadataChannel);
2988 if (NULL == mDummyBatchChannel) {
2989             LOGE("creation of mDummyBatchChannel failed. "
2990                     "Preview will use non-hfr sensor mode ");
2991 }
2992 }
2993 if (mDummyBatchChannel) {
2994 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2995 mDummyBatchStream.width;
2996 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2997 mDummyBatchStream.height;
2998 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2999 CAM_STREAM_TYPE_VIDEO;
3000 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3001 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3002 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3003 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3004 gCamCapability[mCameraId]->color_arrangement);
3005 mStreamConfigInfo.num_streams++;
3006 }
3007
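    // Buffer count hints for this stream configuration: 4K video sets max_buffers to 0,
    // EIS3-enabled video uses MAX_VIDEO_BUFFERS, and every other case uses
    // MAX_INFLIGHT_REQUESTS (mirrors the nested ternary below).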
3008 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3009 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003010 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003011 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003012
3013 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3014 for (pendingRequestIterator i = mPendingRequestsList.begin();
3015 i != mPendingRequestsList.end();) {
3016 i = erasePendingRequest(i);
3017 }
3018 mPendingFrameDropList.clear();
3019 // Initialize/Reset the pending buffers list
3020 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3021 req.mPendingBufferList.clear();
3022 }
3023 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3024
Thierry Strudel3d639192016-09-09 11:52:26 -07003025 mCurJpegMeta.clear();
3026     //Get min frame duration for this stream configuration
3027 deriveMinFrameDuration();
3028
Chien-Yu Chenee335912017-02-09 17:53:20 -08003029 mFirstPreviewIntentSeen = false;
3030
3031     // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003032 {
3033 Mutex::Autolock l(gHdrPlusClientLock);
3034 disableHdrPlusModeLocked();
3035 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003036
Thierry Strudel3d639192016-09-09 11:52:26 -07003037 // Update state
3038 mState = CONFIGURED;
3039
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003040 mFirstMetadataCallback = true;
3041
Thierry Strudel3d639192016-09-09 11:52:26 -07003042 pthread_mutex_unlock(&mMutex);
3043
3044 return rc;
3045}
3046
3047/*===========================================================================
3048 * FUNCTION : validateCaptureRequest
3049 *
3050 * DESCRIPTION: validate a capture request from camera service
3051 *
3052 * PARAMETERS :
3053 * @request : request from framework to process
3054 *
3055 * RETURN :
3056 *
3057 *==========================================================================*/
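// Validation performed below, in order: non-NULL request, settings present for the
// first request after a stream configuration, at least one output buffer (unless an
// internally requested stream is present), output buffer count below MAX_NUM_STREAMS,
// and per-buffer checks on status, release fences and buffer handles for the optional
// input buffer and every output buffer.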
3058int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003059 camera3_capture_request_t *request,
3060 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003061{
3062 ssize_t idx = 0;
3063 const camera3_stream_buffer_t *b;
3064 CameraMetadata meta;
3065
3066 /* Sanity check the request */
3067 if (request == NULL) {
3068 LOGE("NULL capture request");
3069 return BAD_VALUE;
3070 }
3071
3072 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3073 /*settings cannot be null for the first request*/
3074 return BAD_VALUE;
3075 }
3076
3077 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003078 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3079 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003080        LOGE("Request %d: No output buffers provided!",
3081                frameNumber);
3082 return BAD_VALUE;
3083 }
3084 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3085        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3086 request->num_output_buffers, MAX_NUM_STREAMS);
3087 return BAD_VALUE;
3088 }
3089 if (request->input_buffer != NULL) {
3090 b = request->input_buffer;
3091 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3092 LOGE("Request %d: Buffer %ld: Status not OK!",
3093 frameNumber, (long)idx);
3094 return BAD_VALUE;
3095 }
3096 if (b->release_fence != -1) {
3097 LOGE("Request %d: Buffer %ld: Has a release fence!",
3098 frameNumber, (long)idx);
3099 return BAD_VALUE;
3100 }
3101 if (b->buffer == NULL) {
3102 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3103 frameNumber, (long)idx);
3104 return BAD_VALUE;
3105 }
3106 }
3107
3108 // Validate all buffers
3109 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003110 if (b == NULL) {
3111 return BAD_VALUE;
3112 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003114 QCamera3ProcessingChannel *channel =
3115 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3116 if (channel == NULL) {
3117 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3118 frameNumber, (long)idx);
3119 return BAD_VALUE;
3120 }
3121 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3122 LOGE("Request %d: Buffer %ld: Status not OK!",
3123 frameNumber, (long)idx);
3124 return BAD_VALUE;
3125 }
3126 if (b->release_fence != -1) {
3127 LOGE("Request %d: Buffer %ld: Has a release fence!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->buffer == NULL) {
3132 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (*(b->buffer) == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL private handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 idx++;
3142 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003143 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003144 return NO_ERROR;
3145}
3146
3147/*===========================================================================
3148 * FUNCTION : deriveMinFrameDuration
3149 *
3150 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3151 * on currently configured streams.
3152 *
3153 * PARAMETERS : NONE
3154 *
3155 * RETURN : NONE
3156 *
3157 *==========================================================================*/
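// Example with hypothetical sizes: a 4032x3024 BLOB stream and a 1920x1080 preview give
// maxProcessedDim = 4032*3024; if a RAW stream is also configured, the smallest entry in
// raw_dim[] whose area is >= that value is picked, and the matching raw_min_duration[] /
// picture_min_duration[] table entries become mMinRawFrameDuration,
// mMinProcessedFrameDuration and mMinJpegFrameDuration.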
3158void QCamera3HardwareInterface::deriveMinFrameDuration()
3159{
3160 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003161 bool hasRaw = false;
3162
3163 mMinRawFrameDuration = 0;
3164 mMinJpegFrameDuration = 0;
3165 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003166
3167 maxJpegDim = 0;
3168 maxProcessedDim = 0;
3169 maxRawDim = 0;
3170
3171 // Figure out maximum jpeg, processed, and raw dimensions
3172 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3173 it != mStreamInfo.end(); it++) {
3174
3175 // Input stream doesn't have valid stream_type
3176 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3177 continue;
3178
3179 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3180 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3181 if (dimension > maxJpegDim)
3182 maxJpegDim = dimension;
3183 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3184 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3185 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003186 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003187 if (dimension > maxRawDim)
3188 maxRawDim = dimension;
3189 } else {
3190 if (dimension > maxProcessedDim)
3191 maxProcessedDim = dimension;
3192 }
3193 }
3194
3195 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3196 MAX_SIZES_CNT);
3197
3198 //Assume all jpeg dimensions are in processed dimensions.
3199 if (maxJpegDim > maxProcessedDim)
3200 maxProcessedDim = maxJpegDim;
3201 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003202 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003203 maxRawDim = INT32_MAX;
3204
3205 for (size_t i = 0; i < count; i++) {
3206 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3207 gCamCapability[mCameraId]->raw_dim[i].height;
3208 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3209 maxRawDim = dimension;
3210 }
3211 }
3212
3213 //Find minimum durations for processed, jpeg, and raw
3214 for (size_t i = 0; i < count; i++) {
3215 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3216 gCamCapability[mCameraId]->raw_dim[i].height) {
3217 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3218 break;
3219 }
3220 }
3221 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3222 for (size_t i = 0; i < count; i++) {
3223 if (maxProcessedDim ==
3224 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3225 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3226 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3227 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3228 break;
3229 }
3230 }
3231}
3232
3233/*===========================================================================
3234 * FUNCTION : getMinFrameDuration
3235 *
3236 * DESCRIPTION: get minimum frame duration based on the currently derived minimum frame
3237 *              durations and the current request configuration.
3238 *
3239 * PARAMETERS : @request: request sent by the framework
3240 *
3241 * RETURN : min frame duration for a particular request
3242 *
3243 *==========================================================================*/
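// For example, a request containing only preview and/or RAW buffers returns
// MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once a BLOB (JPEG) buffer is
// part of the request, mMinJpegFrameDuration is folded into the max as well.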
3244int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3245{
3246 bool hasJpegStream = false;
3247 bool hasRawStream = false;
3248 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3249 const camera3_stream_t *stream = request->output_buffers[i].stream;
3250 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3251 hasJpegStream = true;
3252 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3253 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3254 stream->format == HAL_PIXEL_FORMAT_RAW16)
3255 hasRawStream = true;
3256 }
3257
3258 if (!hasJpegStream)
3259 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3260 else
3261 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3262}
3263
3264/*===========================================================================
3265 * FUNCTION : handleBuffersDuringFlushLock
3266 *
3267 * DESCRIPTION: Account for buffers returned from back-end during flush
3268 * This function is executed while mMutex is held by the caller.
3269 *
3270 * PARAMETERS :
3271 * @buffer: image buffer for the callback
3272 *
3273 * RETURN :
3274 *==========================================================================*/
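// Every buffer returned by the back-end while a flush is pending decrements
// numPendingBufsAtFlush; once the count drops to zero, mBuffersCond is signalled so the
// flush() call waiting on it can continue.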
3275void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3276{
3277 bool buffer_found = false;
3278 for (List<PendingBuffersInRequest>::iterator req =
3279 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3280 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3281 for (List<PendingBufferInfo>::iterator i =
3282 req->mPendingBufferList.begin();
3283 i != req->mPendingBufferList.end(); i++) {
3284 if (i->buffer == buffer->buffer) {
3285 mPendingBuffersMap.numPendingBufsAtFlush--;
3286 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3287 buffer->buffer, req->frame_number,
3288 mPendingBuffersMap.numPendingBufsAtFlush);
3289 buffer_found = true;
3290 break;
3291 }
3292 }
3293 if (buffer_found) {
3294 break;
3295 }
3296 }
3297 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3298 //signal the flush()
3299 LOGD("All buffers returned to HAL. Continue flush");
3300 pthread_cond_signal(&mBuffersCond);
3301 }
3302}
3303
Thierry Strudel3d639192016-09-09 11:52:26 -07003304/*===========================================================================
3305 * FUNCTION : handleBatchMetadata
3306 *
3307 * DESCRIPTION: Handles metadata buffer callback in batch mode
3308 *
3309 * PARAMETERS : @metadata_buf: metadata buffer
3310 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3311 * the meta buf in this method
3312 *
3313 * RETURN :
3314 *
3315 *==========================================================================*/
3316void QCamera3HardwareInterface::handleBatchMetadata(
3317 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3318{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003319 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003320
3321 if (NULL == metadata_buf) {
3322 LOGE("metadata_buf is NULL");
3323 return;
3324 }
3325     /* In batch mode, the metadata will contain the frame number and timestamp of
3326      * the last frame in the batch. Eg: a batch containing buffers from requests
3327      * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3328      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3329 * multiple process_capture_results */
3330 metadata_buffer_t *metadata =
3331 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3332 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3333 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3334 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3335 uint32_t frame_number = 0, urgent_frame_number = 0;
3336 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3337 bool invalid_metadata = false;
3338 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3339 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003340 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003341
3342 int32_t *p_frame_number_valid =
3343 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3344 uint32_t *p_frame_number =
3345 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3346 int64_t *p_capture_time =
3347 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3348 int32_t *p_urgent_frame_number_valid =
3349 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3350 uint32_t *p_urgent_frame_number =
3351 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3352
3353 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3354 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3355 (NULL == p_urgent_frame_number)) {
3356 LOGE("Invalid metadata");
3357 invalid_metadata = true;
3358 } else {
3359 frame_number_valid = *p_frame_number_valid;
3360 last_frame_number = *p_frame_number;
3361 last_frame_capture_time = *p_capture_time;
3362 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3363 last_urgent_frame_number = *p_urgent_frame_number;
3364 }
3365
3366     /* In batch mode, when no video buffers are requested, set_parms are sent
3367 * for every capture_request. The difference between consecutive urgent
3368 * frame numbers and frame numbers should be used to interpolate the
3369 * corresponding frame numbers and time stamps */
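    /* Example with hypothetical numbers: if this batch reports last_frame_number = 8 and
     * mPendingBatchMap maps it to first_frame_number = 5, then frameNumDiff = 8 + 1 - 5 = 4
     * and frame numbers 5..8, with timestamps spaced by NSEC_PER_SEC / mHFRVideoFps, are
     * interpolated below - one handleMetadataWithLock call per inferred frame. */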
3370 pthread_mutex_lock(&mMutex);
3371 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003372 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3373 if(idx < 0) {
3374 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3375 last_urgent_frame_number);
3376 mState = ERROR;
3377 pthread_mutex_unlock(&mMutex);
3378 return;
3379 }
3380 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3382 first_urgent_frame_number;
3383
3384 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3385 urgent_frame_number_valid,
3386 first_urgent_frame_number, last_urgent_frame_number);
3387 }
3388
3389 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003390 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3391 if(idx < 0) {
3392 LOGE("Invalid frame number received: %d. Irrecoverable error",
3393 last_frame_number);
3394 mState = ERROR;
3395 pthread_mutex_unlock(&mMutex);
3396 return;
3397 }
3398 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003399 frameNumDiff = last_frame_number + 1 -
3400 first_frame_number;
3401 mPendingBatchMap.removeItem(last_frame_number);
3402
3403 LOGD("frm: valid: %d frm_num: %d - %d",
3404 frame_number_valid,
3405 first_frame_number, last_frame_number);
3406
3407 }
3408 pthread_mutex_unlock(&mMutex);
3409
3410 if (urgent_frame_number_valid || frame_number_valid) {
3411 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3412 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3413 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3414 urgentFrameNumDiff, last_urgent_frame_number);
3415 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3416 LOGE("frameNumDiff: %d frameNum: %d",
3417 frameNumDiff, last_frame_number);
3418 }
3419
3420 for (size_t i = 0; i < loopCount; i++) {
3421 /* handleMetadataWithLock is called even for invalid_metadata for
3422 * pipeline depth calculation */
3423 if (!invalid_metadata) {
3424 /* Infer frame number. Batch metadata contains frame number of the
3425 * last frame */
3426 if (urgent_frame_number_valid) {
3427 if (i < urgentFrameNumDiff) {
3428 urgent_frame_number =
3429 first_urgent_frame_number + i;
3430 LOGD("inferred urgent frame_number: %d",
3431 urgent_frame_number);
3432 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3433 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3434 } else {
3435 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3436 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3437 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3438 }
3439 }
3440
3441 /* Infer frame number. Batch metadata contains frame number of the
3442 * last frame */
3443 if (frame_number_valid) {
3444 if (i < frameNumDiff) {
3445 frame_number = first_frame_number + i;
3446 LOGD("inferred frame_number: %d", frame_number);
3447 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3448 CAM_INTF_META_FRAME_NUMBER, frame_number);
3449 } else {
3450 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3451 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3452 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3453 }
3454 }
3455
3456 if (last_frame_capture_time) {
3457 //Infer timestamp
3458 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003459 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003460 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003461 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3463 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3464 LOGD("batch capture_time: %lld, capture_time: %lld",
3465 last_frame_capture_time, capture_time);
3466 }
3467 }
3468 pthread_mutex_lock(&mMutex);
3469 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003470 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003471 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3472 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003473                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003474 pthread_mutex_unlock(&mMutex);
3475 }
3476
3477 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003478 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003479 mMetadataChannel->bufDone(metadata_buf);
3480 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003481 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003482 }
3483}
3484
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003485void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3486 camera3_error_msg_code_t errorCode)
3487{
3488 camera3_notify_msg_t notify_msg;
3489 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3490 notify_msg.type = CAMERA3_MSG_ERROR;
3491 notify_msg.message.error.error_code = errorCode;
3492 notify_msg.message.error.error_stream = NULL;
3493 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003494 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003495
3496 return;
3497}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003498
3499/*===========================================================================
3500 * FUNCTION : sendPartialMetadataWithLock
3501 *
3502 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3503 *
3504 * PARAMETERS : @metadata: metadata buffer
3505 * @requestIter: The iterator for the pending capture request for
3506 * which the partial result is being sen
3507 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3508 * last urgent metadata in a batch. Always true for non-batch mode
3509 *
3510 * RETURN :
3511 *
3512 *==========================================================================*/
3513
3514void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3515 metadata_buffer_t *metadata,
3516 const pendingRequestIterator requestIter,
3517 bool lastUrgentMetadataInBatch)
3518{
3519 camera3_capture_result_t result;
3520 memset(&result, 0, sizeof(camera3_capture_result_t));
3521
3522 requestIter->partial_result_cnt++;
3523
3524 // Extract 3A metadata
3525 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003526 metadata, lastUrgentMetadataInBatch, requestIter->frame_number);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003527 // Populate metadata result
3528 result.frame_number = requestIter->frame_number;
3529 result.num_output_buffers = 0;
3530 result.output_buffers = NULL;
3531 result.partial_result = requestIter->partial_result_cnt;
3532
3533 {
3534 Mutex::Autolock l(gHdrPlusClientLock);
3535 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3536 // Notify HDR+ client about the partial metadata.
3537 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3538 result.partial_result == PARTIAL_RESULT_COUNT);
3539 }
3540 }
3541
3542 orchestrateResult(&result);
3543 LOGD("urgent frame_number = %u", result.frame_number);
3544 free_camera_metadata((camera_metadata_t *)result.result);
3545}
3546
Thierry Strudel3d639192016-09-09 11:52:26 -07003547/*===========================================================================
3548 * FUNCTION : handleMetadataWithLock
3549 *
3550 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3551 *
3552 * PARAMETERS : @metadata_buf: metadata buffer
3553 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3554 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003555 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3556 * last urgent metadata in a batch. Always true for non-batch mode
3557 * @lastMetadataInBatch: Boolean to indicate whether this is the
3558 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003559 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3560 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003561 *
3562 * RETURN :
3563 *
3564 *==========================================================================*/
3565void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003566 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003567 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3568 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003569{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003570 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003571 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3572 //during flush do not send metadata from this thread
3573 LOGD("not sending metadata during flush or when mState is error");
3574 if (free_and_bufdone_meta_buf) {
3575 mMetadataChannel->bufDone(metadata_buf);
3576 free(metadata_buf);
3577 }
3578 return;
3579 }
3580
3581 //not in flush
3582 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3583 int32_t frame_number_valid, urgent_frame_number_valid;
3584 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003585 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003586 nsecs_t currentSysTime;
3587
3588 int32_t *p_frame_number_valid =
3589 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3590 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3591 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003592 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 int32_t *p_urgent_frame_number_valid =
3594 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3595 uint32_t *p_urgent_frame_number =
3596 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3597 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3598 metadata) {
3599 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3600 *p_frame_number_valid, *p_frame_number);
3601 }
3602
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003603 camera_metadata_t *resultMetadata = nullptr;
3604
Thierry Strudel3d639192016-09-09 11:52:26 -07003605 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3606 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3607 LOGE("Invalid metadata");
3608 if (free_and_bufdone_meta_buf) {
3609 mMetadataChannel->bufDone(metadata_buf);
3610 free(metadata_buf);
3611 }
3612 goto done_metadata;
3613 }
3614 frame_number_valid = *p_frame_number_valid;
3615 frame_number = *p_frame_number;
3616 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003617 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003618 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3619 urgent_frame_number = *p_urgent_frame_number;
3620 currentSysTime = systemTime(CLOCK_MONOTONIC);
3621
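    // When the sensor timestamp is not calibrated against the monotonic clock, estimate
    // the BOOTTIME - MONOTONIC offset by bracketing a BOOTTIME read between two MONOTONIC
    // reads (keeping the attempt with the smallest gap) and subtract that offset from
    // capture_time.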
Jason Lee603176d2017-05-31 11:43:27 -07003622 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3623 const int tries = 3;
3624 nsecs_t bestGap, measured;
3625 for (int i = 0; i < tries; ++i) {
3626 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3627 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3628 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3629 const nsecs_t gap = tmono2 - tmono;
3630 if (i == 0 || gap < bestGap) {
3631 bestGap = gap;
3632 measured = tbase - ((tmono + tmono2) >> 1);
3633 }
3634 }
3635 capture_time -= measured;
3636 }
3637
Thierry Strudel3d639192016-09-09 11:52:26 -07003638 // Detect if buffers from any requests are overdue
3639 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003640 int64_t timeout;
3641 {
3642 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3643 // If there is a pending HDR+ request, the following requests may be blocked until the
3644 // HDR+ request is done. So allow a longer timeout.
3645 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3646 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3647 }
3648
3649 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003650 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003651 assert(missed.stream->priv);
3652 if (missed.stream->priv) {
3653 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3654 assert(ch->mStreams[0]);
3655 if (ch->mStreams[0]) {
3656 LOGE("Cancel missing frame = %d, buffer = %p,"
3657 "stream type = %d, stream format = %d",
3658 req.frame_number, missed.buffer,
3659 ch->mStreams[0]->getMyType(), missed.stream->format);
3660 ch->timeoutFrame(req.frame_number);
3661 }
3662 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003663 }
3664 }
3665 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003666    //For the very first metadata callback, regardless of whether it contains a valid
3667    //frame number, send the partial metadata for the jumpstarting requests.
3668    //Note that this has to be done even if the metadata doesn't contain a valid
3669    //urgent frame number, because in the case where only 1 request is ever submitted
3670    //to the HAL, there won't be a subsequent valid urgent frame number.
3671 if (mFirstMetadataCallback) {
3672 for (pendingRequestIterator i =
3673 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3674 if (i->bUseFirstPartial) {
3675 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3676 }
3677 }
3678 mFirstMetadataCallback = false;
3679 }
3680
Thierry Strudel3d639192016-09-09 11:52:26 -07003681 //Partial result on process_capture_result for timestamp
3682 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003683 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003684
3685         //Received an urgent Frame Number, handle it
3686 //using partial results
3687 for (pendingRequestIterator i =
3688 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3689 LOGD("Iterator Frame = %d urgent frame = %d",
3690 i->frame_number, urgent_frame_number);
3691
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003692 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 (i->partial_result_cnt == 0)) {
3694 LOGE("Error: HAL missed urgent metadata for frame number %d",
3695 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003696 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003697 }
3698
3699 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003700 i->partial_result_cnt == 0) {
3701 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003702 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3703 // Instant AEC settled for this frame.
3704 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3705 mInstantAECSettledFrameNumber = urgent_frame_number;
3706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003707 break;
3708 }
3709 }
3710 }
3711
3712 if (!frame_number_valid) {
3713 LOGD("Not a valid normal frame number, used as SOF only");
3714 if (free_and_bufdone_meta_buf) {
3715 mMetadataChannel->bufDone(metadata_buf);
3716 free(metadata_buf);
3717 }
3718 goto done_metadata;
3719 }
3720 LOGH("valid frame_number = %u, capture_time = %lld",
3721 frame_number, capture_time);
3722
Emilian Peev4e0fe952017-06-30 12:40:09 -07003723 handleDepthDataLocked(metadata->depth_data, frame_number,
3724 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003725
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003726    // Check whether any stream buffer corresponding to this frame is dropped or not.
3727    // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3728    // OR, if instant AEC is enabled, drop frames until AEC is settled.
3729 for (auto & pendingRequest : mPendingRequestsList) {
3730 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3731 mInstantAECSettledFrameNumber)) {
3732 camera3_notify_msg_t notify_msg = {};
3733 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003734 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003735 QCamera3ProcessingChannel *channel =
3736 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003737 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003738 if (p_cam_frame_drop) {
3739 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003740 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003741 // Got the stream ID for drop frame.
3742 dropFrame = true;
3743 break;
3744 }
3745 }
3746 } else {
3747 // This is instant AEC case.
3748 // For instant AEC drop the stream untill AEC is settled.
3749 dropFrame = true;
3750 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003751
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003752 if (dropFrame) {
3753 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3754 if (p_cam_frame_drop) {
3755 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003756 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003758 } else {
3759 // For instant AEC, inform frame drop and frame number
3760 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3761 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003762 pendingRequest.frame_number, streamID,
3763 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 }
3765 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003767 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003768 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003769 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003770 if (p_cam_frame_drop) {
3771 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003772 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003773 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003774 } else {
3775 // For instant AEC, inform frame drop and frame number
3776 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3777 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003778 pendingRequest.frame_number, streamID,
3779 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 }
3781 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003782 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 PendingFrameDrop.stream_ID = streamID;
3784 // Add the Frame drop info to mPendingFrameDropList
3785 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003787 }
3788 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003790
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 for (auto & pendingRequest : mPendingRequestsList) {
3792 // Find the pending request with the frame number.
3793 if (pendingRequest.frame_number == frame_number) {
3794 // Update the sensor timestamp.
3795 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003796
Thierry Strudel3d639192016-09-09 11:52:26 -07003797
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003798 /* Set the timestamp in display metadata so that clients aware of
3799                private_handle such as VT can use these un-modified timestamps.
3800                The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003801 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003802
Thierry Strudel3d639192016-09-09 11:52:26 -07003803 // Find channel requiring metadata, meaning internal offline postprocess
3804 // is needed.
3805 //TODO: for now, we don't support two streams requiring metadata at the same time.
3806             // (because we are not making copies, and the metadata buffer is not reference counted.)
3807 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3809 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003810 if (iter->need_metadata) {
3811 internalPproc = true;
3812 QCamera3ProcessingChannel *channel =
3813 (QCamera3ProcessingChannel *)iter->stream->priv;
3814 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003815 if(p_is_metabuf_queued != NULL) {
3816 *p_is_metabuf_queued = true;
3817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003818 break;
3819 }
3820 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003821 for (auto itr = pendingRequest.internalRequestList.begin();
3822 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003823 if (itr->need_metadata) {
3824 internalPproc = true;
3825 QCamera3ProcessingChannel *channel =
3826 (QCamera3ProcessingChannel *)itr->stream->priv;
3827 channel->queueReprocMetadata(metadata_buf);
3828 break;
3829 }
3830 }
3831
Thierry Strudel54dc9782017-02-15 12:12:10 -08003832 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003833
3834 bool *enableZsl = nullptr;
3835 if (gExposeEnableZslKey) {
3836 enableZsl = &pendingRequest.enableZsl;
3837 }
3838
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003840 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003841 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003842
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003843 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003844
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003845 if (pendingRequest.blob_request) {
3846 //Dump tuning metadata if enabled and available
3847 char prop[PROPERTY_VALUE_MAX];
3848 memset(prop, 0, sizeof(prop));
3849 property_get("persist.camera.dumpmetadata", prop, "0");
3850 int32_t enabled = atoi(prop);
3851 if (enabled && metadata->is_tuning_params_valid) {
3852 dumpMetadataToFile(metadata->tuning_params,
3853 mMetaFrameCount,
3854 enabled,
3855 "Snapshot",
3856 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003857 }
3858 }
3859
3860 if (!internalPproc) {
3861 LOGD("couldn't find need_metadata for this metadata");
3862 // Return metadata buffer
3863 if (free_and_bufdone_meta_buf) {
3864 mMetadataChannel->bufDone(metadata_buf);
3865 free(metadata_buf);
3866 }
3867 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003868
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003869 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003870 }
3871 }
3872
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003873 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3874
3875 // Try to send out capture result metadata.
3876 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003877 return;
3878
Thierry Strudel3d639192016-09-09 11:52:26 -07003879done_metadata:
3880 for (pendingRequestIterator i = mPendingRequestsList.begin();
3881 i != mPendingRequestsList.end() ;i++) {
3882 i->pipeline_depth++;
3883 }
3884 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3885 unblockRequestIfNecessary();
3886}
3887
3888/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003889 * FUNCTION : handleDepthDataLocked
3890 *
3891 * DESCRIPTION: Handles incoming depth data
3892 *
3893 * PARAMETERS : @depthData : Depth data
3894 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003895 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003896 *
3897 * RETURN :
3898 *
3899 *==========================================================================*/
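// Note: this helper walks the depth channel queue in frame-number order. Buffers
// older than 'frameNumber' are returned with CAMERA3_BUFFER_STATUS_ERROR (plus an
// ERROR_BUFFER notify), the buffer matching 'frameNumber' is populated with
// 'depthData' only when 'valid' is set, and newer buffers stay queued for a later
// callback.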
3900void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003901 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003902 uint32_t currentFrameNumber;
3903 buffer_handle_t *depthBuffer;
3904
3905 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003906 return;
3907 }
3908
3909 camera3_stream_buffer_t resultBuffer =
3910 {.acquire_fence = -1,
3911 .release_fence = -1,
3912 .status = CAMERA3_BUFFER_STATUS_OK,
3913 .buffer = nullptr,
3914 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003915 do {
3916 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3917 if (nullptr == depthBuffer) {
3918 break;
3919 }
3920
Emilian Peev7650c122017-01-19 08:24:33 -08003921 resultBuffer.buffer = depthBuffer;
3922 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003923 if (valid) {
3924 int32_t rc = mDepthChannel->populateDepthData(depthData,
3925 frameNumber);
3926 if (NO_ERROR != rc) {
3927 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3928 } else {
3929 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3930 }
Emilian Peev7650c122017-01-19 08:24:33 -08003931 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003932 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003933 }
3934 } else if (currentFrameNumber > frameNumber) {
3935 break;
3936 } else {
3937 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3938 {{currentFrameNumber, mDepthChannel->getStream(),
3939 CAMERA3_MSG_ERROR_BUFFER}}};
3940 orchestrateNotify(&notify_msg);
3941
3942 LOGE("Depth buffer for frame number: %d is missing "
3943 "returning back!", currentFrameNumber);
3944 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3945 }
3946 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003947 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003948 } while (currentFrameNumber < frameNumber);
3949}
3950
3951/*===========================================================================
3952 * FUNCTION : notifyErrorFoPendingDepthData
3953 *
3954 * DESCRIPTION: Returns error for any pending depth buffers
3955 *
3956 * PARAMETERS : depthCh - depth channel that needs to get flushed
3957 *
3958 * RETURN :
3959 *
3960 *==========================================================================*/
3961void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3962 QCamera3DepthChannel *depthCh) {
3963 uint32_t currentFrameNumber;
3964 buffer_handle_t *depthBuffer;
3965
3966 if (nullptr == depthCh) {
3967 return;
3968 }
3969
3970 camera3_notify_msg_t notify_msg =
3971 {.type = CAMERA3_MSG_ERROR,
3972 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3973 camera3_stream_buffer_t resultBuffer =
3974 {.acquire_fence = -1,
3975 .release_fence = -1,
3976 .buffer = nullptr,
3977 .stream = depthCh->getStream(),
3978 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003979
3980 while (nullptr !=
3981 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3982 depthCh->unmapBuffer(currentFrameNumber);
3983
3984 notify_msg.message.error.frame_number = currentFrameNumber;
3985 orchestrateNotify(&notify_msg);
3986
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003987 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003988 }
3989}
3990
3991/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003992 * FUNCTION : hdrPlusPerfLock
3993 *
3994 * DESCRIPTION: perf lock for HDR+ using custom intent
3995 *
3996 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3997 *
3998 * RETURN : None
3999 *
4000 *==========================================================================*/
4001void QCamera3HardwareInterface::hdrPlusPerfLock(
4002 mm_camera_super_buf_t *metadata_buf)
4003{
4004 if (NULL == metadata_buf) {
4005 LOGE("metadata_buf is NULL");
4006 return;
4007 }
4008 metadata_buffer_t *metadata =
4009 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4010 int32_t *p_frame_number_valid =
4011 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4012 uint32_t *p_frame_number =
4013 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4014
4015 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4016 LOGE("%s: Invalid metadata", __func__);
4017 return;
4018 }
4019
4020 //acquire perf lock for 5 sec after the last HDR frame is captured
4021 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4022 if ((p_frame_number != NULL) &&
4023 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004024 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004025 }
4026 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004027}
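// Note: the perf lock acquired above is bounded by HDR_PLUS_PERF_TIME_OUT;
// handleBufferWithLock() also releases PERF_LOCK_TAKE_SNAPSHOT as soon as a BLOB
// (snapshot) buffer is returned to the framework.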
4028
4029/*===========================================================================
4030 * FUNCTION : handleInputBufferWithLock
4031 *
4032 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4033 *
4034 * PARAMETERS : @frame_number: frame number of the input buffer
4035 *
4036 * RETURN :
4037 *
4038 *==========================================================================*/
4039void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4040{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004041 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004042 pendingRequestIterator i = mPendingRequestsList.begin();
4043 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4044 i++;
4045 }
4046 if (i != mPendingRequestsList.end() && i->input_buffer) {
4047 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004048 CameraMetadata settings;
4049 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4050 if(i->settings) {
4051 settings = i->settings;
4052 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4053 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004054 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004055 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004056 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004057 } else {
4058 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004059 }
4060
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004061 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4062 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4063 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004064
4065 camera3_capture_result result;
4066 memset(&result, 0, sizeof(camera3_capture_result));
4067 result.frame_number = frame_number;
4068 result.result = i->settings;
4069 result.input_buffer = i->input_buffer;
4070 result.partial_result = PARTIAL_RESULT_COUNT;
4071
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004072 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004073 LOGD("Input request metadata and input buffer frame_number = %u",
4074 i->frame_number);
4075 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004076
4077 // Dispatch result metadata that may be just unblocked by this reprocess result.
4078 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004079 } else {
4080 LOGE("Could not find input request for frame number %d", frame_number);
4081 }
4082}
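// Note: for a reprocess request the shutter timestamp is taken from
// ANDROID_SENSOR_TIMESTAMP in the request settings (falling back to the current
// monotonic time), and the result metadata returned to the framework is simply
// the request settings themselves; no new metadata is generated by the HAL.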
4083
4084/*===========================================================================
4085 * FUNCTION : handleBufferWithLock
4086 *
4087 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4088 *
4089 * PARAMETERS : @buffer: image buffer for the callback
4090 * @frame_number: frame number of the image buffer
4091 *
4092 * RETURN :
4093 *
4094 *==========================================================================*/
4095void QCamera3HardwareInterface::handleBufferWithLock(
4096 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4097{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004098 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004099
4100 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4101 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4102 }
4103
Thierry Strudel3d639192016-09-09 11:52:26 -07004104 /* Nothing to be done during error state */
4105 if ((ERROR == mState) || (DEINIT == mState)) {
4106 return;
4107 }
4108 if (mFlushPerf) {
4109 handleBuffersDuringFlushLock(buffer);
4110 return;
4111 }
4112 // Not in flush.
4113 // Look up the pending request for this frame number. For a reprocess request
4114 // (input buffer present), this buffer completion also kicks the result metadata
4115 // path before the buffer is handed to the output dispatcher below.
4116 pendingRequestIterator i = mPendingRequestsList.begin();
4117 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4118 i++;
4119 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004120
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004121 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004122 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004123 // For a reprocessing request, try to send out result metadata.
4124 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004125 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004126 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004127
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004128 // Check if this frame was dropped.
4129 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4130 m != mPendingFrameDropList.end(); m++) {
4131 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4132 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4133 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4134 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4135 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4136 frame_number, streamID);
4137 m = mPendingFrameDropList.erase(m);
4138 break;
4139 }
4140 }
4141
4142 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4143 LOGH("result frame_number = %d, buffer = %p",
4144 frame_number, buffer->buffer);
4145
4146 mPendingBuffersMap.removeBuf(buffer->buffer);
4147 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4148
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004149 if (mPreviewStarted == false) {
4150 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4151 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004152 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4153
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004154 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4155 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4156 mPreviewStarted = true;
4157
4158 // Set power hint for preview
4159 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4160 }
4161 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004162}
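// Note: this function never returns the buffer to the framework directly. The
// buffer status is first patched (pending frame-drop list, accumulated per-buffer
// error bits), the buffer is removed from mPendingBuffersMap, and delivery is
// delegated to mOutputBufferDispatcher so that cross-frame ordering is handled in
// one place. The first preview buffer also releases the start-preview/open-camera
// perf locks and switches to the encode power hint.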
4163
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004164void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004165 const camera_metadata_t *resultMetadata)
4166{
4167 // Find the pending request for this result metadata.
4168 auto requestIter = mPendingRequestsList.begin();
4169 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4170 requestIter++;
4171 }
4172
4173 if (requestIter == mPendingRequestsList.end()) {
4174 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4175 return;
4176 }
4177
4178 // Update the result metadata
4179 requestIter->resultMetadata = resultMetadata;
4180
4181 // Check what type of request this is.
4182 bool liveRequest = false;
4183 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004184 // HDR+ request doesn't have partial results.
4185 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004186 } else if (requestIter->input_buffer != nullptr) {
4187 // Reprocessing request result is the same as settings.
4188 requestIter->resultMetadata = requestIter->settings;
4189 // Reprocessing request doesn't have partial results.
4190 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4191 } else {
4192 liveRequest = true;
4193 requestIter->partial_result_cnt++;
4194 mPendingLiveRequest--;
4195
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004196 {
4197 Mutex::Autolock l(gHdrPlusClientLock);
4198 // For a live request, send the metadata to HDR+ client.
4199 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4200 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4201 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4202 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004203 }
4204 }
4205
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004206 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4207}
4208
4209void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4210 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004211 // The pending requests are ordered by increasing frame number. The result metadata for a
4212 // request can be sent only once all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004213 bool readyToSend = true;
4214
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004215 // Iterate through the pending requests to send out result metadata that are ready. Also if
4216 // this result metadata belongs to a live request, notify errors for previous live requests
4217 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004218 auto iter = mPendingRequestsList.begin();
4219 while (iter != mPendingRequestsList.end()) {
4220 // Check if current pending request is ready. If it's not ready, the following pending
4221 // requests are also not ready.
4222 if (readyToSend && iter->resultMetadata == nullptr) {
4223 readyToSend = false;
4224 }
4225
4226 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4227
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004228 camera3_capture_result_t result = {};
4229 result.frame_number = iter->frame_number;
4230 result.result = iter->resultMetadata;
4231 result.partial_result = iter->partial_result_cnt;
4232
4233 // If this pending buffer has result metadata, we may be able to send out shutter callback
4234 // and result metadata.
4235 if (iter->resultMetadata != nullptr) {
4236 if (!readyToSend) {
4237 // If any of the previous pending request is not ready, this pending request is
4238 // also not ready to send in order to keep shutter callbacks and result metadata
4239 // in order.
4240 iter++;
4241 continue;
4242 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004243 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004244 // If the result metadata belongs to a live request, notify errors for previous pending
4245 // live requests.
4246 mPendingLiveRequest--;
4247
4248 CameraMetadata dummyMetadata;
4249 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4250 result.result = dummyMetadata.release();
4251
4252 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004253
4254 // partial_result should be PARTIAL_RESULT_COUNT in case of
4255 // ERROR_RESULT.
4256 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4257 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004258 } else {
4259 iter++;
4260 continue;
4261 }
4262
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004263 result.output_buffers = nullptr;
4264 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004265 orchestrateResult(&result);
4266
4267 // For reprocessing, result metadata is the same as settings so do not free it here to
4268 // avoid double free.
4269 if (result.result != iter->settings) {
4270 free_camera_metadata((camera_metadata_t *)result.result);
4271 }
4272 iter->resultMetadata = nullptr;
4273 iter = erasePendingRequest(iter);
4274 }
4275
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004276 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004277 for (auto &iter : mPendingRequestsList) {
4278 // Increment pipeline depth for the following pending requests.
4279 if (iter.frame_number > frameNumber) {
4280 iter.pipeline_depth++;
4281 }
4282 }
4283 }
4284
4285 unblockRequestIfNecessary();
4286}
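// Ordering invariant: results are delivered strictly in frame-number order. A
// pending request whose result metadata has not arrived blocks every later
// request from being sent, and older *live* requests that are still missing
// metadata when a newer live result comes in are completed with
// CAMERA3_MSG_ERROR_RESULT so the pipeline cannot stall indefinitely.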
4287
Thierry Strudel3d639192016-09-09 11:52:26 -07004288/*===========================================================================
4289 * FUNCTION : unblockRequestIfNecessary
4290 *
4291 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4292 * that mMutex is held when this function is called.
4293 *
4294 * PARAMETERS :
4295 *
4296 * RETURN :
4297 *
4298 *==========================================================================*/
4299void QCamera3HardwareInterface::unblockRequestIfNecessary()
4300{
4301 // Unblock process_capture_request
4302 pthread_cond_signal(&mRequestCond);
4303}
4304
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004305/*===========================================================================
4306 * FUNCTION : isHdrSnapshotRequest
4307 *
4308 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4309 *
4310 * PARAMETERS : camera3 request structure
4311 *
4312 * RETURN : boolean decision variable
4313 *
4314 *==========================================================================*/
4315bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4316{
4317 if (request == NULL) {
4318 LOGE("Invalid request handle");
4319 assert(0);
4320 return false;
4321 }
4322
4323 if (!mForceHdrSnapshot) {
4324 CameraMetadata frame_settings;
4325 frame_settings = request->settings;
4326
4327 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4328 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4329 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4330 return false;
4331 }
4332 } else {
4333 return false;
4334 }
4335
4336 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4337 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4338 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4339 return false;
4340 }
4341 } else {
4342 return false;
4343 }
4344 }
4345
4346 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4347 if (request->output_buffers[i].stream->format
4348 == HAL_PIXEL_FORMAT_BLOB) {
4349 return true;
4350 }
4351 }
4352
4353 return false;
4354}
4355/*===========================================================================
4356 * FUNCTION : orchestrateRequest
4357 *
4358 * DESCRIPTION: Orchestrates a capture request from camera service
4359 *
4360 * PARAMETERS :
4361 * @request : request from framework to process
4362 *
4363 * RETURN : Error status codes
4364 *
4365 *==========================================================================*/
4366int32_t QCamera3HardwareInterface::orchestrateRequest(
4367 camera3_capture_request_t *request)
4368{
4369
4370 uint32_t originalFrameNumber = request->frame_number;
4371 uint32_t originalOutputCount = request->num_output_buffers;
4372 const camera_metadata_t *original_settings = request->settings;
4373 List<InternalRequest> internallyRequestedStreams;
4374 List<InternalRequest> emptyInternalList;
4375
4376 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4377 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4378 uint32_t internalFrameNumber;
4379 CameraMetadata modified_meta;
4380
4381
4382 /* Add Blob channel to list of internally requested streams */
4383 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4384 if (request->output_buffers[i].stream->format
4385 == HAL_PIXEL_FORMAT_BLOB) {
4386 InternalRequest streamRequested;
4387 streamRequested.meteringOnly = 1;
4388 streamRequested.need_metadata = 0;
4389 streamRequested.stream = request->output_buffers[i].stream;
4390 internallyRequestedStreams.push_back(streamRequested);
4391 }
4392 }
4393 request->num_output_buffers = 0;
4394 auto itr = internallyRequestedStreams.begin();
4395
4396 /* Modify setting to set compensation */
4397 modified_meta = request->settings;
4398 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4399 uint8_t aeLock = 1;
4400 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4401 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4402 camera_metadata_t *modified_settings = modified_meta.release();
4403 request->settings = modified_settings;
4404
4405 /* Capture Settling & -2x frame */
4406 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4407 request->frame_number = internalFrameNumber;
4408 processCaptureRequest(request, internallyRequestedStreams);
4409
4410 request->num_output_buffers = originalOutputCount;
4411 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4412 request->frame_number = internalFrameNumber;
4413 processCaptureRequest(request, emptyInternalList);
4414 request->num_output_buffers = 0;
4415
4416 modified_meta = modified_settings;
4417 expCompensation = 0;
4418 aeLock = 1;
4419 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4420 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4421 modified_settings = modified_meta.release();
4422 request->settings = modified_settings;
4423
4424 /* Capture Settling & 0X frame */
4425
4426 itr = internallyRequestedStreams.begin();
4427 if (itr == internallyRequestedStreams.end()) {
4428 LOGE("Error Internally Requested Stream list is empty");
4429 assert(0);
4430 } else {
4431 itr->need_metadata = 0;
4432 itr->meteringOnly = 1;
4433 }
4434
4435 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4436 request->frame_number = internalFrameNumber;
4437 processCaptureRequest(request, internallyRequestedStreams);
4438
4439 itr = internallyRequestedStreams.begin();
4440 if (itr == internallyRequestedStreams.end()) {
4441 ALOGE("Error Internally Requested Stream list is empty");
4442 assert(0);
4443 } else {
4444 itr->need_metadata = 1;
4445 itr->meteringOnly = 0;
4446 }
4447
4448 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4449 request->frame_number = internalFrameNumber;
4450 processCaptureRequest(request, internallyRequestedStreams);
4451
4452 /* Capture 2X frame*/
4453 modified_meta = modified_settings;
4454 expCompensation = GB_HDR_2X_STEP_EV;
4455 aeLock = 1;
4456 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4457 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4458 modified_settings = modified_meta.release();
4459 request->settings = modified_settings;
4460
4461 itr = internallyRequestedStreams.begin();
4462 if (itr == internallyRequestedStreams.end()) {
4463 ALOGE("Error Internally Requested Stream list is empty");
4464 assert(0);
4465 } else {
4466 itr->need_metadata = 0;
4467 itr->meteringOnly = 1;
4468 }
4469 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4470 request->frame_number = internalFrameNumber;
4471 processCaptureRequest(request, internallyRequestedStreams);
4472
4473 itr = internallyRequestedStreams.begin();
4474 if (itr == internallyRequestedStreams.end()) {
4475 ALOGE("Error Internally Requested Stream list is empty");
4476 assert(0);
4477 } else {
4478 itr->need_metadata = 1;
4479 itr->meteringOnly = 0;
4480 }
4481
4482 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4483 request->frame_number = internalFrameNumber;
4484 processCaptureRequest(request, internallyRequestedStreams);
4485
4486
4487 /* Capture 2X on original streaming config*/
4488 internallyRequestedStreams.clear();
4489
4490 /* Restore original settings pointer */
4491 request->settings = original_settings;
4492 } else {
4493 uint32_t internalFrameNumber;
4494 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4495 request->frame_number = internalFrameNumber;
4496 return processCaptureRequest(request, internallyRequestedStreams);
4497 }
4498
4499 return NO_ERROR;
4500}
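// Summary of the HDR bracketing path above: the framework request is expanded
// into a sequence of internal requests (metering-only settling frames plus frames
// needing metadata for offline post-processing) at the bracketed exposure
// compensations, with AE locked throughout. Only the request mapped back to the
// original framework frame number carries the application's output buffers; the
// other internal frame numbers map to EMPTY_FRAMEWORK_FRAME_NUMBER, so their
// results and notifies are dropped in orchestrateResult()/orchestrateNotify().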
4501
4502/*===========================================================================
4503 * FUNCTION : orchestrateResult
4504 *
4505 * DESCRIPTION: Orchestrates a capture result to camera service
4506 *
4507 * PARAMETERS :
4508 * @result : capture result to forward to the camera service
4509 *
4510 * RETURN :
4511 *
4512 *==========================================================================*/
4513void QCamera3HardwareInterface::orchestrateResult(
4514 camera3_capture_result_t *result)
4515{
4516 uint32_t frameworkFrameNumber;
4517 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4518 frameworkFrameNumber);
4519 if (rc != NO_ERROR) {
4520 LOGE("Cannot find translated frameworkFrameNumber");
4521 assert(0);
4522 } else {
4523 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004524 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004525 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004526 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004527 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4528 camera_metadata_entry_t entry;
4529 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4530 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004531 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004532 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4533 if (ret != OK)
4534 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004535 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004536 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004537 result->frame_number = frameworkFrameNumber;
4538 mCallbackOps->process_capture_result(mCallbackOps, result);
4539 }
4540 }
4541}
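// Note: before forwarding a result, orchestrateResult() also rewrites
// ANDROID_SYNC_FRAME_NUMBER (when present) to the translated framework frame
// number, so the value the framework sees is expressed in its own frame-number
// space rather than the HAL-internal numbering.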
4542
4543/*===========================================================================
4544 * FUNCTION : orchestrateNotify
4545 *
4546 * DESCRIPTION: Orchestrates a notify to camera service
4547 *
4548 * PARAMETERS :
4549 * @notify_msg : notify message to forward to the camera service
4550 *
4551 * RETURN :
4552 *
4553 *==========================================================================*/
4554void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4555{
4556 uint32_t frameworkFrameNumber;
4557 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004558 int32_t rc = NO_ERROR;
4559
4560 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004561 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004562
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004563 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004564 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4565 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4566 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004567 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004568 LOGE("Cannot find translated frameworkFrameNumber");
4569 assert(0);
4570 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004571 }
4572 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004573
4574 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4575 LOGD("Internal Request drop the notifyCb");
4576 } else {
4577 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4578 mCallbackOps->notify(mCallbackOps, notify_msg);
4579 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004580}
4581
4582/*===========================================================================
4583 * FUNCTION : FrameNumberRegistry
4584 *
4585 * DESCRIPTION: Constructor
4586 *
4587 * PARAMETERS :
4588 *
4589 * RETURN :
4590 *
4591 *==========================================================================*/
4592FrameNumberRegistry::FrameNumberRegistry()
4593{
4594 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4595}
4596
4597/*===========================================================================
4598 * FUNCTION : ~FrameNumberRegistry
4599 *
4600 * DESCRIPTION: Destructor
4601 *
4602 * PARAMETERS :
4603 *
4604 * RETURN :
4605 *
4606 *==========================================================================*/
4607FrameNumberRegistry::~FrameNumberRegistry()
4608{
4609}
4610
4611/*===========================================================================
4612 * FUNCTION : purgeOldEntriesLocked
4613 *
4614 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4615 *
4616 * PARAMETERS :
4617 *
4618 * RETURN : NONE
4619 *
4620 *==========================================================================*/
4621void FrameNumberRegistry::purgeOldEntriesLocked()
4622{
4623 while (_register.begin() != _register.end()) {
4624 auto itr = _register.begin();
4625 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4626 _register.erase(itr);
4627 } else {
4628 return;
4629 }
4630 }
4631}
4632
4633/*===========================================================================
4634 * FUNCTION : allocStoreInternalFrameNumber
4635 *
4636 * DESCRIPTION: Method to record a framework request and associate a newly
4637 * generated internal request number with it
4638 *
4639 * PARAMETERS :
4640 * @frameworkFrameNumber: Identifier given by the framework
4641 * @internalFrameNumber : Output parameter which will hold the newly generated
4642 * internal entry
4643 *
4644 * RETURN : Error code
4645 *
4646 *==========================================================================*/
4647int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4648 uint32_t &internalFrameNumber)
4649{
4650 Mutex::Autolock lock(mRegistryLock);
4651 internalFrameNumber = _nextFreeInternalNumber++;
4652 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4653 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4654 purgeOldEntriesLocked();
4655 return NO_ERROR;
4656}
4657
4658/*===========================================================================
4659 * FUNCTION : generateStoreInternalFrameNumber
4660 *
4661 * DESCRIPTION: Method to associate a new internal request number independent
4662 * of any associate with framework requests
4663 *
4664 * PARAMETERS :
4665 * @internalFrame#: Output parameter which will have the newly generated internal
4666 *
4667 *
4668 * RETURN : Error code
4669 *
4670 *==========================================================================*/
4671int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4672{
4673 Mutex::Autolock lock(mRegistryLock);
4674 internalFrameNumber = _nextFreeInternalNumber++;
4675 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4676 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4677 purgeOldEntriesLocked();
4678 return NO_ERROR;
4679}
4680
4681/*===========================================================================
4682 * FUNCTION : getFrameworkFrameNumber
4683 *
4684 * DESCRIPTION: Method to query the framework frame number given an internal one
4685 *
4686 * PARAMETERS :
4687 * @internalFrameNumber : Internal reference
4688 * @frameworkFrameNumber: Output parameter holding the framework frame number
4689 *
4690 * RETURN : Error code
4691 *
4692 *==========================================================================*/
4693int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4694 uint32_t &frameworkFrameNumber)
4695{
4696 Mutex::Autolock lock(mRegistryLock);
4697 auto itr = _register.find(internalFrameNumber);
4698 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004699 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004700 return -ENOENT;
4701 }
4702
4703 frameworkFrameNumber = itr->second;
4704 purgeOldEntriesLocked();
4705 return NO_ERROR;
4706}
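// Minimal usage sketch for FrameNumberRegistry (illustrative only, not part of
// the HAL flow):
//
//     FrameNumberRegistry registry;
//     uint32_t internalFn = 0, frameworkFn = 0;
//     // Internal-only frame (no framework counterpart):
//     registry.generateStoreInternalFrameNumber(internalFn);
//     if ((registry.getFrameworkFrameNumber(internalFn, frameworkFn) == NO_ERROR) &&
//             (frameworkFn == EMPTY_FRAMEWORK_FRAME_NUMBER)) {
//         // Results/notifies for this frame are dropped by the HAL.
//     }
//     // Frame tied to framework frame number 100:
//     registry.allocStoreInternalFrameNumber(100, internalFn);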
Thierry Strudel3d639192016-09-09 11:52:26 -07004707
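/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Populates an HDR+ (pbcamera) stream configuration from a
 *              QCamera3Channel stream: dimensions, format, per-plane
 *              stride/scanline, and the padding implied by the backend
 *              frame length.
 *
 * PARAMETERS :
 *   @config        : output stream configuration to fill
 *   @pbStreamId    : stream identifier to assign
 *   @pbStreamFormat: pbcamera pixel format of the stream
 *   @channel       : channel that owns the stream
 *   @streamIndex   : index of the stream within the channel
 *
 * RETURN     : OK on success; BAD_VALUE or NAME_NOT_FOUND on invalid input
 *==========================================================================*/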
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004708status_t QCamera3HardwareInterface::fillPbStreamConfig(
4709 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4710 QCamera3Channel *channel, uint32_t streamIndex) {
4711 if (config == nullptr) {
4712 LOGE("%s: config is null", __FUNCTION__);
4713 return BAD_VALUE;
4714 }
4715
4716 if (channel == nullptr) {
4717 LOGE("%s: channel is null", __FUNCTION__);
4718 return BAD_VALUE;
4719 }
4720
4721 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4722 if (stream == nullptr) {
4723 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4724 return NAME_NOT_FOUND;
4725 }
4726
4727 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4728 if (streamInfo == nullptr) {
4729 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4730 return NAME_NOT_FOUND;
4731 }
4732
4733 config->id = pbStreamId;
4734 config->image.width = streamInfo->dim.width;
4735 config->image.height = streamInfo->dim.height;
4736 config->image.padding = 0;
4737 config->image.format = pbStreamFormat;
4738
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004739 uint32_t totalPlaneSize = 0;
4740
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004741 // Fill plane information.
4742 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4743 pbcamera::PlaneConfiguration plane;
4744 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4745 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4746 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004747
4748 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004749 }
4750
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004751 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004752 return OK;
4753}
4754
Thierry Strudel3d639192016-09-09 11:52:26 -07004755/*===========================================================================
4756 * FUNCTION : processCaptureRequest
4757 *
4758 * DESCRIPTION: process a capture request from camera service
4759 *
4760 * PARAMETERS :
4761 * @request : request from framework to process
 * @internallyRequestedStreams : streams requested internally by the HAL
 * (e.g. for HDR snapshot bracketing); empty for a plain framework request
4762 *
4763 * RETURN :
4764 *
4765 *==========================================================================*/
4766int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004767 camera3_capture_request_t *request,
4768 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004769{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004770 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004771 int rc = NO_ERROR;
4772 int32_t request_id;
4773 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004774 bool isVidBufRequested = false;
4775 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004776 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004777
4778 pthread_mutex_lock(&mMutex);
4779
4780 // Validate current state
4781 switch (mState) {
4782 case CONFIGURED:
4783 case STARTED:
4784 /* valid state */
4785 break;
4786
4787 case ERROR:
4788 pthread_mutex_unlock(&mMutex);
4789 handleCameraDeviceError();
4790 return -ENODEV;
4791
4792 default:
4793 LOGE("Invalid state %d", mState);
4794 pthread_mutex_unlock(&mMutex);
4795 return -ENODEV;
4796 }
4797
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004798 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004799 if (rc != NO_ERROR) {
4800 LOGE("incoming request is not valid");
4801 pthread_mutex_unlock(&mMutex);
4802 return rc;
4803 }
4804
4805 meta = request->settings;
4806
4807 // For first capture request, send capture intent, and
4808 // stream on all streams
4809 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004810 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004811 // send an unconfigure to the backend so that the isp
4812 // resources are deallocated
4813 if (!mFirstConfiguration) {
4814 cam_stream_size_info_t stream_config_info;
4815 int32_t hal_version = CAM_HAL_V3;
4816 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4817 stream_config_info.buffer_info.min_buffers =
4818 MIN_INFLIGHT_REQUESTS;
4819 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004820 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004821 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004822 clear_metadata_buffer(mParameters);
4823 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4824 CAM_INTF_PARM_HAL_VERSION, hal_version);
4825 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4826 CAM_INTF_META_STREAM_INFO, stream_config_info);
4827 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4828 mParameters);
4829 if (rc < 0) {
4830 LOGE("set_parms for unconfigure failed");
4831 pthread_mutex_unlock(&mMutex);
4832 return rc;
4833 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004834
Thierry Strudel3d639192016-09-09 11:52:26 -07004835 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004836 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004838 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004839 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004840 property_get("persist.camera.is_type", is_type_value, "4");
4841 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4842 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4843 property_get("persist.camera.is_type_preview", is_type_value, "4");
4844 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4845 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004846
4847 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4848 int32_t hal_version = CAM_HAL_V3;
4849 uint8_t captureIntent =
4850 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4851 mCaptureIntent = captureIntent;
4852 clear_metadata_buffer(mParameters);
4853 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4854 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4855 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004856 if (mFirstConfiguration) {
4857 // configure instant AEC
4858 // Instant AEC is a session based parameter and it is needed only
4859 // once per complete session after open camera.
4860 // i.e. This is set only once for the first capture request, after open camera.
4861 setInstantAEC(meta);
4862 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004863 uint8_t fwkVideoStabMode=0;
4864 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4865 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4866 }
4867
Xue Tuecac74e2017-04-17 13:58:15 -07004868 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4869 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004870 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004871 int32_t vsMode;
4872 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4874 rc = BAD_VALUE;
4875 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004876 LOGD("setEis %d", setEis);
4877 bool eis3Supported = false;
4878 size_t count = IS_TYPE_MAX;
4879 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4880 for (size_t i = 0; i < count; i++) {
4881 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4882 eis3Supported = true;
4883 break;
4884 }
4885 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004886
4887 //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004888 //it could be either IS_TYPE_EIS_2_0 or IS_TYPE_EIS_3_0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004889 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4890 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004891 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4892 is_type = isTypePreview;
4893 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4894 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4895 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004897 } else {
4898 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004899 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004900 } else {
4901 is_type = IS_TYPE_NONE;
4902 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004904 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004905 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4906 }
4907 }
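// At this point mStreamConfigInfo.is_type[] holds the per-stream IS selection:
// preview streams use isTypePreview, video streams use isTypeVideo (downgraded to
// IS_TYPE_EIS_2_0 when EIS 3.0 is not supported by the sensor capabilities), and
// all other streams get IS_TYPE_NONE, as does every stream when EIS is disabled
// for this session.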
4908
4909 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4910 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4911
Thierry Strudel54dc9782017-02-15 12:12:10 -08004912 //Disable tintless only if the property is set to 0
4913 memset(prop, 0, sizeof(prop));
4914 property_get("persist.camera.tintless.enable", prop, "1");
4915 int32_t tintless_value = atoi(prop);
4916
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4918 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004919
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 //Disable CDS for HFR mode or if DIS/EIS is on.
4921 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4922 //after every configure_stream
4923 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4924 (m_bIsVideo)) {
4925 int32_t cds = CAM_CDS_MODE_OFF;
4926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4927 CAM_INTF_PARM_CDS_MODE, cds))
4928 LOGE("Failed to disable CDS for HFR mode");
4929
4930 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004931
4932 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4933 uint8_t* use_av_timer = NULL;
4934
4935 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004936 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 use_av_timer = &m_debug_avtimer;
4938 }
4939 else{
4940 use_av_timer =
4941 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004942 if (use_av_timer) {
4943 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4944 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004945 }
4946
4947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4948 rc = BAD_VALUE;
4949 }
4950 }
4951
Thierry Strudel3d639192016-09-09 11:52:26 -07004952 setMobicat();
4953
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004954 uint8_t nrMode = 0;
4955 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4956 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4957 }
4958
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 /* Set fps and hfr mode while sending meta stream info so that sensor
4960 * can configure appropriate streaming mode */
4961 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004962 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4963 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004964 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4965 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004966 if (rc == NO_ERROR) {
4967 int32_t max_fps =
4968 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004969 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004970 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4971 }
4972 /* For HFR, more buffers are dequeued upfront to improve the performance */
4973 if (mBatchSize) {
4974 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4975 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4976 }
4977 }
4978 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004979 LOGE("setHalFpsRange failed");
4980 }
4981 }
4982 if (meta.exists(ANDROID_CONTROL_MODE)) {
4983 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4984 rc = extractSceneMode(meta, metaMode, mParameters);
4985 if (rc != NO_ERROR) {
4986 LOGE("extractSceneMode failed");
4987 }
4988 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004989 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004990
Thierry Strudel04e026f2016-10-10 11:27:36 -07004991 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4992 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4993 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4994 rc = setVideoHdrMode(mParameters, vhdr);
4995 if (rc != NO_ERROR) {
4996 LOGE("setVideoHDR is failed");
4997 }
4998 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004999
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005000 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005001 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005002 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005003 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5005 sensorModeFullFov)) {
5006 rc = BAD_VALUE;
5007 }
5008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 //TODO: validate the arguments, HSV scenemode should have only the
5010 //advertised fps ranges
5011
5012 /* Set the capture intent, HAL version, tintless, stream info,
5013 * and DIS enable parameters to the backend */
5014 LOGD("set_parms META_STREAM_INFO " );
5015 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005016 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5017 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005018 mStreamConfigInfo.type[i],
5019 mStreamConfigInfo.stream_sizes[i].width,
5020 mStreamConfigInfo.stream_sizes[i].height,
5021 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005022 mStreamConfigInfo.format[i],
5023 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005024 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005025
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5027 mParameters);
5028 if (rc < 0) {
5029 LOGE("set_parms failed for hal version, stream info");
5030 }
5031
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005032 cam_sensor_mode_info_t sensorModeInfo = {};
5033 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005034 if (rc != NO_ERROR) {
5035 LOGE("Failed to get sensor output size");
5036 pthread_mutex_unlock(&mMutex);
5037 goto error_exit;
5038 }
5039
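// The crop region mapper translates crop/ROI coordinates between the full active
// pixel array advertised to the framework and the active array of the sensor mode
// actually selected for this configuration, which can differ (e.g. binned or
// cropped sensor modes).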
5040 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5041 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005042 sensorModeInfo.active_array_size.width,
5043 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005044
5045 /* Set batchmode before initializing channel. Since registerBuffer
5046 * internally initializes some of the channels, better set batchmode
5047 * even before first register buffer */
5048 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5049 it != mStreamInfo.end(); it++) {
5050 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5051 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5052 && mBatchSize) {
5053 rc = channel->setBatchSize(mBatchSize);
5054 //Disable per frame map unmap for HFR/batchmode case
5055 rc |= channel->setPerFrameMapUnmap(false);
5056 if (NO_ERROR != rc) {
5057 LOGE("Channel init failed %d", rc);
5058 pthread_mutex_unlock(&mMutex);
5059 goto error_exit;
5060 }
5061 }
5062 }
5063
5064 //First initialize all streams
5065 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5066 it != mStreamInfo.end(); it++) {
5067 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005068
5069 /* Initial value of NR mode is needed before stream on */
5070 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005071 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5072 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005073 setEis) {
5074 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5075 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5076 is_type = mStreamConfigInfo.is_type[i];
5077 break;
5078 }
5079 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005080 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005081 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005082 rc = channel->initialize(IS_TYPE_NONE);
5083 }
5084 if (NO_ERROR != rc) {
5085 LOGE("Channel initialization failed %d", rc);
5086 pthread_mutex_unlock(&mMutex);
5087 goto error_exit;
5088 }
5089 }
5090
5091 if (mRawDumpChannel) {
5092 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5093 if (rc != NO_ERROR) {
5094 LOGE("Error: Raw Dump Channel init failed");
5095 pthread_mutex_unlock(&mMutex);
5096 goto error_exit;
5097 }
5098 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005099 if (mHdrPlusRawSrcChannel) {
5100 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5101 if (rc != NO_ERROR) {
5102 LOGE("Error: HDR+ RAW Source Channel init failed");
5103 pthread_mutex_unlock(&mMutex);
5104 goto error_exit;
5105 }
5106 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005107 if (mSupportChannel) {
5108 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5109 if (rc < 0) {
5110 LOGE("Support channel initialization failed");
5111 pthread_mutex_unlock(&mMutex);
5112 goto error_exit;
5113 }
5114 }
5115 if (mAnalysisChannel) {
5116 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5117 if (rc < 0) {
5118 LOGE("Analysis channel initialization failed");
5119 pthread_mutex_unlock(&mMutex);
5120 goto error_exit;
5121 }
5122 }
5123 if (mDummyBatchChannel) {
5124 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5125 if (rc < 0) {
5126 LOGE("mDummyBatchChannel setBatchSize failed");
5127 pthread_mutex_unlock(&mMutex);
5128 goto error_exit;
5129 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005130 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (rc < 0) {
5132 LOGE("mDummyBatchChannel initialization failed");
5133 pthread_mutex_unlock(&mMutex);
5134 goto error_exit;
5135 }
5136 }
5137
5138 // Set bundle info
5139 rc = setBundleInfo();
5140 if (rc < 0) {
5141 LOGE("setBundleInfo failed %d", rc);
5142 pthread_mutex_unlock(&mMutex);
5143 goto error_exit;
5144 }
5145
5146 //update settings from app here
5147 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5148 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5149 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5150 }
5151 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5152 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5153 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5154 }
5155 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5156 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5157 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5158
5159 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5160 (mLinkedCameraId != mCameraId) ) {
5161 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5162 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005163 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005164 goto error_exit;
5165 }
5166 }
5167
5168 // add bundle related cameras
5169 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5170 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005171 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5172 &m_pDualCamCmdPtr->bundle_info;
5173 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 if (mIsDeviceLinked)
5175 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5176 else
5177 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5178
5179 pthread_mutex_lock(&gCamLock);
5180
5181 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5182 LOGE("Dualcam: Invalid Session Id ");
5183 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005184 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 goto error_exit;
5186 }
5187
5188 if (mIsMainCamera == 1) {
5189 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5190 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005191 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005192 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 // related session id should be session id of linked session
5194 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5195 } else {
5196 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5197 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005198 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005199 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005200 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5201 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005202 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005203 pthread_mutex_unlock(&gCamLock);
5204
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005205 rc = mCameraHandle->ops->set_dual_cam_cmd(
5206 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005207 if (rc < 0) {
5208 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005209 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 goto error_exit;
5211 }
5212 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 goto no_error;
5214error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005215 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005216 return rc;
5217no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005218 mWokenUpByDaemon = false;
5219 mPendingLiveRequest = 0;
5220 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005221 }
5222
5223 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005224 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005225
5226 if (mFlushPerf) {
5227 //we cannot accept any requests during flush
5228 LOGE("process_capture_request cannot proceed during flush");
5229 pthread_mutex_unlock(&mMutex);
5230 return NO_ERROR; //should return an error
5231 }
5232
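    /* Every request should carry ANDROID_REQUEST_ID. If it is missing, reuse
     * the id of the previous request; the first request after configure (or
     * any request when no previous id exists) must provide one, otherwise it
     * is rejected. */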
5233 if (meta.exists(ANDROID_REQUEST_ID)) {
5234 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5235 mCurrentRequestId = request_id;
5236 LOGD("Received request with id: %d", request_id);
5237 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5238 LOGE("Unable to find request id field, \
5239 & no previous id available");
5240 pthread_mutex_unlock(&mMutex);
5241 return NAME_NOT_FOUND;
5242 } else {
5243 LOGD("Re-using old request id");
5244 request_id = mCurrentRequestId;
5245 }
5246
5247 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5248 request->num_output_buffers,
5249 request->input_buffer,
5250 frameNumber);
5251 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005252 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005254 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 uint32_t snapshotStreamId = 0;
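    /* Walk the output buffers once up front: wait on each acquire fence so
     * the buffers are safe to fill, note whether a JPEG (blob) snapshot is
     * part of the request, skip depth blob buffers (they are serviced by the
     * depth channel separately), and record every requested stream id in
     * streamsArray for the backend. */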
5256 for (size_t i = 0; i < request->num_output_buffers; i++) {
5257 const camera3_stream_buffer_t& output = request->output_buffers[i];
5258 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5259
Emilian Peev7650c122017-01-19 08:24:33 -08005260 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5261 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005262 //FIXME: Call function to store a local copy of the JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 blob_request = 1;
5264 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5265 }
5266
5267 if (output.acquire_fence != -1) {
5268 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5269 close(output.acquire_fence);
5270 if (rc != OK) {
5271 LOGE("sync wait failed %d", rc);
5272 pthread_mutex_unlock(&mMutex);
5273 return rc;
5274 }
5275 }
5276
Emilian Peev0f3c3162017-03-15 12:57:46 +00005277 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5278 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005279 depthRequestPresent = true;
5280 continue;
5281 }
5282
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005283 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005285
5286 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5287 isVidBufRequested = true;
5288 }
5289 }
5290
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005291 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5292 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5293 itr++) {
5294 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5295 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5296 channel->getStreamID(channel->getStreamTypeMask());
5297
5298 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5299 isVidBufRequested = true;
5300 }
5301 }
5302
Thierry Strudel3d639192016-09-09 11:52:26 -07005303 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005304 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005305 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005306 }
5307 if (blob_request && mRawDumpChannel) {
5308 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005309 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005311 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 }
5313
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005314 {
5315 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5316 // Request a RAW buffer if
5317 // 1. mHdrPlusRawSrcChannel is valid.
5318 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5319 // 3. There is no pending HDR+ request.
5320 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5321 mHdrPlusPendingRequests.size() == 0) {
5322 streamsArray.stream_request[streamsArray.num_streams].streamID =
5323 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5324 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5325 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005326 }
5327
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005328 //extract capture intent
5329 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5330 mCaptureIntent =
5331 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5332 }
5333
5334 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5335 mCacMode =
5336 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5337 }
5338
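    /* HDR+ routing: when the Easel HDR+ client is connected and HDR+ mode is
     * enabled, a request with STILL_CAPTURE intent may be handed to HDR+
     * instead of the regular snapshot path; trySubmittingHdrPlusRequestLocked()
     * makes that decision and fills pendingHdrPlusRequest on success. */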
5339 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005340 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005341
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005342 {
5343 Mutex::Autolock l(gHdrPlusClientLock);
5344 // If this request has a still capture intent, try to submit an HDR+ request.
5345 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5346 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5347 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5348 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005349 }
5350
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005351 if (hdrPlusRequest) {
5352 // For a HDR+ request, just set the frame parameters.
5353 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5354 if (rc < 0) {
5355 LOGE("fail to set frame parameters");
5356 pthread_mutex_unlock(&mMutex);
5357 return rc;
5358 }
5359 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005360 /* Parse the settings:
5361 * - For every request in NORMAL MODE
5362 * - For every request in HFR mode during preview only case
5363 * - For first request of every batch in HFR mode during video
5364 * recording. In batchmode the same settings except frame number is
5365 * repeated in each request of the batch.
5366 */
5367 if (!mBatchSize ||
5368 (mBatchSize && !isVidBufRequested) ||
5369 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005370 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005371 if (rc < 0) {
5372 LOGE("fail to set frame parameters");
5373 pthread_mutex_unlock(&mMutex);
5374 return rc;
5375 }
5376 }
5377 /* For batchMode HFR, setFrameParameters is not called for every
5378 * request. But only frame number of the latest request is parsed.
5379 * Keep track of first and last frame numbers in a batch so that
5380 * metadata for the frame numbers of the batch can be duplicated in
5381 * handleBatchMetadata */
5382 if (mBatchSize) {
5383 if (!mToBeQueuedVidBufs) {
5384 //start of the batch
5385 mFirstFrameNumberInBatch = request->frame_number;
5386 }
5387 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5388 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5389 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005390 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005391 return BAD_VALUE;
5392 }
5393 }
5394 if (mNeedSensorRestart) {
5395 /* Unlock the mutex as restartSensor waits on the channels to be
5396 * stopped, which in turn calls stream callback functions -
5397 * handleBufferWithLock and handleMetadataWithLock */
5398 pthread_mutex_unlock(&mMutex);
5399 rc = dynamicUpdateMetaStreamInfo();
5400 if (rc != NO_ERROR) {
5401 LOGE("Restarting the sensor failed");
5402 return BAD_VALUE;
5403 }
5404 mNeedSensorRestart = false;
5405 pthread_mutex_lock(&mMutex);
5406 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005407 if(mResetInstantAEC) {
5408 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5409 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5410 mResetInstantAEC = false;
5411 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005412 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005413 if (request->input_buffer->acquire_fence != -1) {
5414 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5415 close(request->input_buffer->acquire_fence);
5416 if (rc != OK) {
5417 LOGE("input buffer sync wait failed %d", rc);
5418 pthread_mutex_unlock(&mMutex);
5419 return rc;
5420 }
5421 }
5422 }
5423
5424 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5425 mLastCustIntentFrmNum = frameNumber;
5426 }
5427 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005428 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 pendingRequestIterator latestRequest;
5430 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005431 pendingRequest.num_buffers = depthRequestPresent ?
5432 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005433 pendingRequest.request_id = request_id;
5434 pendingRequest.blob_request = blob_request;
5435 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005436 if (request->input_buffer) {
5437 pendingRequest.input_buffer =
5438 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5439 *(pendingRequest.input_buffer) = *(request->input_buffer);
5440 pInputBuffer = pendingRequest.input_buffer;
5441 } else {
5442 pendingRequest.input_buffer = NULL;
5443 pInputBuffer = NULL;
5444 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005445 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005446
5447 pendingRequest.pipeline_depth = 0;
5448 pendingRequest.partial_result_cnt = 0;
5449 extractJpegMetadata(mCurJpegMeta, request);
5450 pendingRequest.jpegMetadata = mCurJpegMeta;
5451 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005452 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005453 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5454 mHybridAeEnable =
5455 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5456 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005457
5458 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5459 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005460 /* DevCamDebug metadata processCaptureRequest */
5461 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5462 mDevCamDebugMetaEnable =
5463 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5464 }
5465 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5466 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005467
5468 //extract CAC info
5469 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5470 mCacMode =
5471 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5472 }
5473 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005474 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005475
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005476 // extract enableZsl info
5477 if (gExposeEnableZslKey) {
5478 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5479 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5480 mZslEnabled = pendingRequest.enableZsl;
5481 } else {
5482 pendingRequest.enableZsl = mZslEnabled;
5483 }
5484 }
5485
Thierry Strudel3d639192016-09-09 11:52:26 -07005486 PendingBuffersInRequest bufsForCurRequest;
5487 bufsForCurRequest.frame_number = frameNumber;
5488 // Mark current timestamp for the new request
5489 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005490 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005491
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005492 if (hdrPlusRequest) {
5493 // Save settings for this request.
5494 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5495 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5496
5497 // Add to pending HDR+ request queue.
5498 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5499 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5500
5501 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5502 }
5503
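    /* Track the framework buffers of this frame. Depth (PDAF) blob buffers
     * are skipped on purpose: they are mapped to the depth channel further
     * down and do not go through the regular pending buffer accounting. */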
Thierry Strudel3d639192016-09-09 11:52:26 -07005504 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005505 if ((request->output_buffers[i].stream->data_space ==
5506 HAL_DATASPACE_DEPTH) &&
5507 (HAL_PIXEL_FORMAT_BLOB ==
5508 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005509 continue;
5510 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005511 RequestedBufferInfo requestedBuf;
5512 memset(&requestedBuf, 0, sizeof(requestedBuf));
5513 requestedBuf.stream = request->output_buffers[i].stream;
5514 requestedBuf.buffer = NULL;
5515 pendingRequest.buffers.push_back(requestedBuf);
5516
5517 // Add to buffer handle the pending buffers list
5518 PendingBufferInfo bufferInfo;
5519 bufferInfo.buffer = request->output_buffers[i].buffer;
5520 bufferInfo.stream = request->output_buffers[i].stream;
5521 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5522 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5523 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5524 frameNumber, bufferInfo.buffer,
5525 channel->getStreamTypeMask(), bufferInfo.stream->format);
5526 }
5527 // Add this request packet into mPendingBuffersMap
5528 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5529 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5530 mPendingBuffersMap.get_num_overall_buffers());
5531
5532 latestRequest = mPendingRequestsList.insert(
5533 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005534
5535 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5536 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005537 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005538 for (size_t i = 0; i < request->num_output_buffers; i++) {
5539 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5540 }
5541
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 if(mFlush) {
5543 LOGI("mFlush is true");
5544 pthread_mutex_unlock(&mMutex);
5545 return NO_ERROR;
5546 }
5547
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005548 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5549 // channel.
5550 if (!hdrPlusRequest) {
5551 int indexUsed;
5552 // Notify metadata channel we receive a request
5553 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005554
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 if(request->input_buffer != NULL){
5556 LOGD("Input request, frame_number %d", frameNumber);
5557 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5558 if (NO_ERROR != rc) {
5559 LOGE("fail to set reproc parameters");
5560 pthread_mutex_unlock(&mMutex);
5561 return rc;
5562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005563 }
5564
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005565 // Call request on other streams
5566 uint32_t streams_need_metadata = 0;
5567 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
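        /* Hand each output buffer to its channel. The buffer index chosen by
         * the channel (indexUsed) is written back into streamsArray so the
         * backend knows which buffer slot services the stream for this frame;
         * blob and (some) YUV targets additionally mark need_metadata so the
         * HAL metadata can be routed to them for reprocessing. */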
5568 for (size_t i = 0; i < request->num_output_buffers; i++) {
5569 const camera3_stream_buffer_t& output = request->output_buffers[i];
5570 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5571
5572 if (channel == NULL) {
5573 LOGW("invalid channel pointer for stream");
5574 continue;
5575 }
5576
5577 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5578 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5579 output.buffer, request->input_buffer, frameNumber);
5580 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005581 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005582 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5583 if (rc < 0) {
5584 LOGE("Fail to request on picture channel");
5585 pthread_mutex_unlock(&mMutex);
5586 return rc;
5587 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005588 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005589 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5590 assert(NULL != mDepthChannel);
5591 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005592
Emilian Peev7650c122017-01-19 08:24:33 -08005593 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5594 if (rc < 0) {
5595 LOGE("Fail to map on depth buffer");
5596 pthread_mutex_unlock(&mMutex);
5597 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005598 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005599 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005600 } else {
5601 LOGD("snapshot request with buffer %p, frame_number %d",
5602 output.buffer, frameNumber);
5603 if (!request->settings) {
5604 rc = channel->request(output.buffer, frameNumber,
5605 NULL, mPrevParameters, indexUsed);
5606 } else {
5607 rc = channel->request(output.buffer, frameNumber,
5608 NULL, mParameters, indexUsed);
5609 }
5610 if (rc < 0) {
5611 LOGE("Fail to request on picture channel");
5612 pthread_mutex_unlock(&mMutex);
5613 return rc;
5614 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005615
Emilian Peev7650c122017-01-19 08:24:33 -08005616 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5617 uint32_t j = 0;
5618 for (j = 0; j < streamsArray.num_streams; j++) {
5619 if (streamsArray.stream_request[j].streamID == streamId) {
5620 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5621 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5622 else
5623 streamsArray.stream_request[j].buf_index = indexUsed;
5624 break;
5625 }
5626 }
5627 if (j == streamsArray.num_streams) {
5628 LOGE("Did not find matching stream to update index");
5629 assert(0);
5630 }
5631
5632 pendingBufferIter->need_metadata = true;
5633 streams_need_metadata++;
5634 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005635 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005636 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5637 bool needMetadata = false;
5638 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5639 rc = yuvChannel->request(output.buffer, frameNumber,
5640 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5641 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005642 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005644 pthread_mutex_unlock(&mMutex);
5645 return rc;
5646 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005647
5648 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5649 uint32_t j = 0;
5650 for (j = 0; j < streamsArray.num_streams; j++) {
5651 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005652 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5653 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5654 else
5655 streamsArray.stream_request[j].buf_index = indexUsed;
5656 break;
5657 }
5658 }
5659 if (j == streamsArray.num_streams) {
5660 LOGE("Did not find matching stream to update index");
5661 assert(0);
5662 }
5663
5664 pendingBufferIter->need_metadata = needMetadata;
5665 if (needMetadata)
5666 streams_need_metadata += 1;
5667 LOGD("calling YUV channel request, need_metadata is %d",
5668 needMetadata);
5669 } else {
5670 LOGD("request with buffer %p, frame_number %d",
5671 output.buffer, frameNumber);
5672
5673 rc = channel->request(output.buffer, frameNumber, indexUsed);
5674
5675 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5676 uint32_t j = 0;
5677 for (j = 0; j < streamsArray.num_streams; j++) {
5678 if (streamsArray.stream_request[j].streamID == streamId) {
5679 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5680 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5681 else
5682 streamsArray.stream_request[j].buf_index = indexUsed;
5683 break;
5684 }
5685 }
5686 if (j == streamsArray.num_streams) {
5687 LOGE("Did not find matching stream to update index");
5688 assert(0);
5689 }
5690
5691 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5692 && mBatchSize) {
5693 mToBeQueuedVidBufs++;
5694 if (mToBeQueuedVidBufs == mBatchSize) {
5695 channel->queueBatchBuf();
5696 }
5697 }
5698 if (rc < 0) {
5699 LOGE("request failed");
5700 pthread_mutex_unlock(&mMutex);
5701 return rc;
5702 }
5703 }
5704 pendingBufferIter++;
5705 }
5706
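        /* Streams requested internally by the HAL, with no framework buffer
         * attached; these appear to be used for HAL initiated captures such
         * as metering-only frames ahead of a snapshot. */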
5707 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5708 itr++) {
5709 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5710
5711 if (channel == NULL) {
5712 LOGE("invalid channel pointer for stream");
5713 assert(0);
5714 return BAD_VALUE;
5715 }
5716
5717 InternalRequest requestedStream;
5718 requestedStream = (*itr);
5719
5720
5721 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5722 LOGD("snapshot request internally input buffer %p, frame_number %d",
5723 request->input_buffer, frameNumber);
5724 if(request->input_buffer != NULL){
5725 rc = channel->request(NULL, frameNumber,
5726 pInputBuffer, &mReprocMeta, indexUsed, true,
5727 requestedStream.meteringOnly);
5728 if (rc < 0) {
5729 LOGE("Fail to request on picture channel");
5730 pthread_mutex_unlock(&mMutex);
5731 return rc;
5732 }
5733 } else {
5734 LOGD("snapshot request with frame_number %d", frameNumber);
5735 if (!request->settings) {
5736 rc = channel->request(NULL, frameNumber,
5737 NULL, mPrevParameters, indexUsed, true,
5738 requestedStream.meteringOnly);
5739 } else {
5740 rc = channel->request(NULL, frameNumber,
5741 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5742 }
5743 if (rc < 0) {
5744 LOGE("Fail to request on picture channel");
5745 pthread_mutex_unlock(&mMutex);
5746 return rc;
5747 }
5748
5749 if ((*itr).meteringOnly != 1) {
5750 requestedStream.need_metadata = 1;
5751 streams_need_metadata++;
5752 }
5753 }
5754
5755 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5756 uint32_t j = 0;
5757 for (j = 0; j < streamsArray.num_streams; j++) {
5758 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005759 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5760 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5761 else
5762 streamsArray.stream_request[j].buf_index = indexUsed;
5763 break;
5764 }
5765 }
5766 if (j == streamsArray.num_streams) {
5767 LOGE("Did not find matching stream to update index");
5768 assert(0);
5769 }
5770
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005771 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005772 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005773 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005774 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005775 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005776 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005777 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005778
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005779 //If 2 streams have need_metadata set to true, fail the request, unless
5780 //we copy/reference count the metadata buffer
5781 if (streams_need_metadata > 1) {
5782 LOGE("not supporting request in which two streams requires"
5783 " 2 HAL metadata for reprocessing");
5784 pthread_mutex_unlock(&mMutex);
5785 return -EINVAL;
5786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005787
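    /* PDAF depth data: default to CAM_PD_DATA_SKIP whenever a depth channel
     * exists and CAM_PD_DATA_DISABLED otherwise. An explicit
     * NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE setting overrides the default,
     * and the last choice is cached in mDepthCloudMode for requests that
     * carry no settings. */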
Emilian Peev656e4fa2017-06-02 16:47:04 +01005788 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5789 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5790 if (depthRequestPresent && mDepthChannel) {
5791 if (request->settings) {
5792 camera_metadata_ro_entry entry;
5793 if (find_camera_metadata_ro_entry(request->settings,
5794 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5795 if (entry.data.u8[0]) {
5796 pdafEnable = CAM_PD_DATA_ENABLED;
5797 } else {
5798 pdafEnable = CAM_PD_DATA_SKIP;
5799 }
5800 mDepthCloudMode = pdafEnable;
5801 } else {
5802 pdafEnable = mDepthCloudMode;
5803 }
5804 } else {
5805 pdafEnable = mDepthCloudMode;
5806 }
5807 }
5808
Emilian Peev7650c122017-01-19 08:24:33 -08005809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5810 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5811 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5812 pthread_mutex_unlock(&mMutex);
5813 return BAD_VALUE;
5814 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005815
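    /* In HFR batch mode the backend is programmed once per batch: the stream
     * ids of every request in the batch are accumulated in
     * mBatchedStreamsArray and a single set_parms covers the whole batch once
     * it is full. */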
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005816 if (request->input_buffer == NULL) {
5817 /* Set the parameters to backend:
5818 * - For every request in NORMAL MODE
5819 * - For every request in HFR mode during preview only case
5820 * - Once every batch in HFR mode during video recording
5821 */
5822 if (!mBatchSize ||
5823 (mBatchSize && !isVidBufRequested) ||
5824 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5825 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5826 mBatchSize, isVidBufRequested,
5827 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005828
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005829 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5830 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5831 uint32_t m = 0;
5832 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5833 if (streamsArray.stream_request[k].streamID ==
5834 mBatchedStreamsArray.stream_request[m].streamID)
5835 break;
5836 }
5837 if (m == mBatchedStreamsArray.num_streams) {
5838 mBatchedStreamsArray.stream_request\
5839 [mBatchedStreamsArray.num_streams].streamID =
5840 streamsArray.stream_request[k].streamID;
5841 mBatchedStreamsArray.stream_request\
5842 [mBatchedStreamsArray.num_streams].buf_index =
5843 streamsArray.stream_request[k].buf_index;
5844 mBatchedStreamsArray.num_streams =
5845 mBatchedStreamsArray.num_streams + 1;
5846 }
5847 }
5848 streamsArray = mBatchedStreamsArray;
5849 }
5850 /* Update stream id of all the requested buffers */
5851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5852 streamsArray)) {
5853 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005854 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005855 return BAD_VALUE;
5856 }
5857
5858 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5859 mParameters);
5860 if (rc < 0) {
5861 LOGE("set_parms failed");
5862 }
5863 /* reset to zero because the batch is queued */
5864 mToBeQueuedVidBufs = 0;
5865 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5866 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5867 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005868 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5869 uint32_t m = 0;
5870 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5871 if (streamsArray.stream_request[k].streamID ==
5872 mBatchedStreamsArray.stream_request[m].streamID)
5873 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005874 }
5875 if (m == mBatchedStreamsArray.num_streams) {
5876 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5877 streamID = streamsArray.stream_request[k].streamID;
5878 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5879 buf_index = streamsArray.stream_request[k].buf_index;
5880 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5881 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005882 }
5883 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005884 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005885
5886 // Start all streams after the first setting is sent, so that the
5887 // setting can be applied sooner: (0 + apply_delay)th frame.
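            // Start-up order: metadata, analysis and support channels first,
            // then every configured processing channel and the optional RAW
            // dump channel. start_channel() is called with sensor streaming
            // deferred so that Easel/MIPI can be brought up before the sensor
            // is actually started below.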
5888 if (mState == CONFIGURED && mChannelHandle) {
5889 //Then start them.
5890 LOGH("Start META Channel");
5891 rc = mMetadataChannel->start();
5892 if (rc < 0) {
5893 LOGE("META channel start failed");
5894 pthread_mutex_unlock(&mMutex);
5895 return rc;
5896 }
5897
5898 if (mAnalysisChannel) {
5899 rc = mAnalysisChannel->start();
5900 if (rc < 0) {
5901 LOGE("Analysis channel start failed");
5902 mMetadataChannel->stop();
5903 pthread_mutex_unlock(&mMutex);
5904 return rc;
5905 }
5906 }
5907
5908 if (mSupportChannel) {
5909 rc = mSupportChannel->start();
5910 if (rc < 0) {
5911 LOGE("Support channel start failed");
5912 mMetadataChannel->stop();
5913 /* Although support and analysis are mutually exclusive today,
5914 add it in any case for future-proofing */
5915 if (mAnalysisChannel) {
5916 mAnalysisChannel->stop();
5917 }
5918 pthread_mutex_unlock(&mMutex);
5919 return rc;
5920 }
5921 }
5922 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5923 it != mStreamInfo.end(); it++) {
5924 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5925 LOGH("Start Processing Channel mask=%d",
5926 channel->getStreamTypeMask());
5927 rc = channel->start();
5928 if (rc < 0) {
5929 LOGE("channel start failed");
5930 pthread_mutex_unlock(&mMutex);
5931 return rc;
5932 }
5933 }
5934
5935 if (mRawDumpChannel) {
5936 LOGD("Starting raw dump stream");
5937 rc = mRawDumpChannel->start();
5938 if (rc != NO_ERROR) {
5939 LOGE("Error Starting Raw Dump Channel");
5940 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5941 it != mStreamInfo.end(); it++) {
5942 QCamera3Channel *channel =
5943 (QCamera3Channel *)(*it)->stream->priv;
5944 LOGH("Stopping Processing Channel mask=%d",
5945 channel->getStreamTypeMask());
5946 channel->stop();
5947 }
5948 if (mSupportChannel)
5949 mSupportChannel->stop();
5950 if (mAnalysisChannel) {
5951 mAnalysisChannel->stop();
5952 }
5953 mMetadataChannel->stop();
5954 pthread_mutex_unlock(&mMutex);
5955 return rc;
5956 }
5957 }
5958
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005959 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005960 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005961 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005962 if (rc != NO_ERROR) {
5963 LOGE("start_channel failed %d", rc);
5964 pthread_mutex_unlock(&mMutex);
5965 return rc;
5966 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005967
5968 {
5969 // Configure Easel for stream on.
5970 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005971
5972 // Now that sensor mode should have been selected, get the selected sensor mode
5973 // info.
5974 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5975 getCurrentSensorModeInfo(mSensorModeInfo);
5976
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005977 if (EaselManagerClientOpened) {
5978 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07005979 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5980 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005981 if (rc != OK) {
5982 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5983 mCameraId, mSensorModeInfo.op_pixel_clk);
5984 pthread_mutex_unlock(&mMutex);
5985 return rc;
5986 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005987 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005988 }
5989 }
5990
5991 // Start sensor streaming.
5992 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5993 mChannelHandle);
5994 if (rc != NO_ERROR) {
5995 LOGE("start_sensor_stream_on failed %d", rc);
5996 pthread_mutex_unlock(&mMutex);
5997 return rc;
5998 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006000 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006001 }
6002
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006003 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07006004 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006005 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006006 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006007 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6008 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6009 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6010 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6011 rc = enableHdrPlusModeLocked();
6012 if (rc != OK) {
6013 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6014 pthread_mutex_unlock(&mMutex);
6015 return rc;
6016 }
6017
6018 mFirstPreviewIntentSeen = true;
6019 }
6020 }
6021
Thierry Strudel3d639192016-09-09 11:52:26 -07006022 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6023
6024 mState = STARTED;
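    /* Flow control: block here while too many requests are in flight
     * (mPendingLiveRequest >= mMinInFlightRequests) and no input buffer is
     * attached. The wait is bounded to about 5 seconds, or longer while an
     * HDR+ request is pending, so a stalled backend surfaces as -ENODEV
     * rather than a permanent hang. */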
6025 // Added a timed condition wait
6026 struct timespec ts;
6027 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006028 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006029 if (rc < 0) {
6030 isValidTimeout = 0;
6031 LOGE("Error reading the real time clock!!");
6032 }
6033 else {
6034 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006035 int64_t timeout = 5;
6036 {
6037 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6038 // If there is a pending HDR+ request, the following requests may be blocked until the
6039 // HDR+ request is done. So allow a longer timeout.
6040 if (mHdrPlusPendingRequests.size() > 0) {
6041 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6042 }
6043 }
6044 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006045 }
6046 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006047 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006048 (mState != ERROR) && (mState != DEINIT)) {
6049 if (!isValidTimeout) {
6050 LOGD("Blocking on conditional wait");
6051 pthread_cond_wait(&mRequestCond, &mMutex);
6052 }
6053 else {
6054 LOGD("Blocking on timed conditional wait");
6055 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6056 if (rc == ETIMEDOUT) {
6057 rc = -ENODEV;
6058 LOGE("Unblocked on timeout!!!!");
6059 break;
6060 }
6061 }
6062 LOGD("Unblocked");
6063 if (mWokenUpByDaemon) {
6064 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006065 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006066 break;
6067 }
6068 }
6069 pthread_mutex_unlock(&mMutex);
6070
6071 return rc;
6072}
6073
6074/*===========================================================================
6075 * FUNCTION : dump
6076 *
6077 * DESCRIPTION: Dump current HAL state (pending requests, pending buffers
6078 *              and pending frame drops) to the given file descriptor
6079 * PARAMETERS :
6080 *   @fd : file descriptor to write the dump to
6081 *
6082 * RETURN : None
6083 *==========================================================================*/
6084void QCamera3HardwareInterface::dump(int fd)
6085{
6086 pthread_mutex_lock(&mMutex);
6087 dprintf(fd, "\n Camera HAL3 information Begin \n");
6088
6089 dprintf(fd, "\nNumber of pending requests: %zu \n",
6090 mPendingRequestsList.size());
6091 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6092 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6093 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6094 for(pendingRequestIterator i = mPendingRequestsList.begin();
6095 i != mPendingRequestsList.end(); i++) {
6096 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6097 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6098 i->input_buffer);
6099 }
6100 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6101 mPendingBuffersMap.get_num_overall_buffers());
6102 dprintf(fd, "-------+------------------\n");
6103 dprintf(fd, " Frame | Stream type mask \n");
6104 dprintf(fd, "-------+------------------\n");
6105 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6106 for(auto &j : req.mPendingBufferList) {
6107 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6108 dprintf(fd, " %5d | %11d \n",
6109 req.frame_number, channel->getStreamTypeMask());
6110 }
6111 }
6112 dprintf(fd, "-------+------------------\n");
6113
6114 dprintf(fd, "\nPending frame drop list: %zu\n",
6115 mPendingFrameDropList.size());
6116 dprintf(fd, "-------+-----------\n");
6117 dprintf(fd, " Frame | Stream ID \n");
6118 dprintf(fd, "-------+-----------\n");
6119 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6120 i != mPendingFrameDropList.end(); i++) {
6121 dprintf(fd, " %5d | %9d \n",
6122 i->frame_number, i->stream_ID);
6123 }
6124 dprintf(fd, "-------+-----------\n");
6125
6126 dprintf(fd, "\n Camera HAL3 information End \n");
6127
6128 /* use dumpsys media.camera as trigger to send update debug level event */
6129 mUpdateDebugLevel = true;
6130 pthread_mutex_unlock(&mMutex);
6131 return;
6132}
6133
6134/*===========================================================================
6135 * FUNCTION : flush
6136 *
6137 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6138 * conditionally restarts channels
6139 *
6140 * PARAMETERS :
6141 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006142 * @ stopChannelImmediately: stop the channel immediately. This should be used
6143 * when device encountered an error and MIPI may has
6144 * been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006145 *
6146 * RETURN :
6147 * 0 on success
6148 * Error code on failure
6149 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006150int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006151{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006152 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006153 int32_t rc = NO_ERROR;
6154
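    /* Flush sequence: stop all channels, undo the dual camera link if one was
     * set up, stop the channel bundle, reset the bundle info, report errors
     * for every pending request, and finally restart the channels if the
     * caller asked for it. */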
6155 LOGD("Unblocking Process Capture Request");
6156 pthread_mutex_lock(&mMutex);
6157 mFlush = true;
6158 pthread_mutex_unlock(&mMutex);
6159
6160 rc = stopAllChannels();
6161 // unlink of dualcam
6162 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006163 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6164 &m_pDualCamCmdPtr->bundle_info;
6165 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006166 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6167 pthread_mutex_lock(&gCamLock);
6168
6169 if (mIsMainCamera == 1) {
6170 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6171 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006172 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006173 // related session id should be session id of linked session
6174 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6175 } else {
6176 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6177 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006178 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006179 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6180 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006181 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006182 pthread_mutex_unlock(&gCamLock);
6183
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006184 rc = mCameraHandle->ops->set_dual_cam_cmd(
6185 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006186 if (rc < 0) {
6187 LOGE("Dualcam: Unlink failed, but still proceed to close");
6188 }
6189 }
6190
6191 if (rc < 0) {
6192 LOGE("stopAllChannels failed");
6193 return rc;
6194 }
6195 if (mChannelHandle) {
6196 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006197 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006198 }
6199
6200 // Reset bundle info
6201 rc = setBundleInfo();
6202 if (rc < 0) {
6203 LOGE("setBundleInfo failed %d", rc);
6204 return rc;
6205 }
6206
6207 // Mutex Lock
6208 pthread_mutex_lock(&mMutex);
6209
6210 // Unblock process_capture_request
6211 mPendingLiveRequest = 0;
6212 pthread_cond_signal(&mRequestCond);
6213
6214 rc = notifyErrorForPendingRequests();
6215 if (rc < 0) {
6216 LOGE("notifyErrorForPendingRequests failed");
6217 pthread_mutex_unlock(&mMutex);
6218 return rc;
6219 }
6220
6221 mFlush = false;
6222
6223 // Start the Streams/Channels
6224 if (restartChannels) {
6225 rc = startAllChannels();
6226 if (rc < 0) {
6227 LOGE("startAllChannels failed");
6228 pthread_mutex_unlock(&mMutex);
6229 return rc;
6230 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006231 if (mChannelHandle) {
6232 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006233 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006234 if (rc < 0) {
6235 LOGE("start_channel failed");
6236 pthread_mutex_unlock(&mMutex);
6237 return rc;
6238 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006239 }
6240 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006241 pthread_mutex_unlock(&mMutex);
6242
6243 return 0;
6244}
6245
6246/*===========================================================================
6247 * FUNCTION : flushPerf
6248 *
6249 * DESCRIPTION: This is the performance optimization version of flush that does
6250 * not use stream off, rather flushes the system
6251 *
6252 * PARAMETERS :
6253 *
6254 *
6255 * RETURN : 0 : success
6256 * -EINVAL: input is malformed (device is not valid)
6257 * -ENODEV: if the device has encountered a serious error
6258 *==========================================================================*/
6259int QCamera3HardwareInterface::flushPerf()
6260{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006261 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006262 int32_t rc = 0;
6263 struct timespec timeout;
6264 bool timed_wait = false;
6265
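    /* Unlike flush(), this path never streams off. It issues a flush to the
     * backend and then waits (bounded by FLUSH_TIMEOUT) for all pending
     * buffers to come back before erroring out whatever requests remain. */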
6266 pthread_mutex_lock(&mMutex);
6267 mFlushPerf = true;
6268 mPendingBuffersMap.numPendingBufsAtFlush =
6269 mPendingBuffersMap.get_num_overall_buffers();
6270 LOGD("Calling flush. Wait for %d buffers to return",
6271 mPendingBuffersMap.numPendingBufsAtFlush);
6272
6273 /* send the flush event to the backend */
6274 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6275 if (rc < 0) {
6276 LOGE("Error in flush: IOCTL failure");
6277 mFlushPerf = false;
6278 pthread_mutex_unlock(&mMutex);
6279 return -ENODEV;
6280 }
6281
6282 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6283 LOGD("No pending buffers in HAL, return flush");
6284 mFlushPerf = false;
6285 pthread_mutex_unlock(&mMutex);
6286 return rc;
6287 }
6288
6289 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006290 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006291 if (rc < 0) {
6292 LOGE("Error reading the real time clock, cannot use timed wait");
6293 } else {
6294 timeout.tv_sec += FLUSH_TIMEOUT;
6295 timed_wait = true;
6296 }
6297
6298 //Block on conditional variable
6299 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6300 LOGD("Waiting on mBuffersCond");
6301 if (!timed_wait) {
6302 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6303 if (rc != 0) {
6304 LOGE("pthread_cond_wait failed due to rc = %s",
6305 strerror(rc));
6306 break;
6307 }
6308 } else {
6309 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6310 if (rc != 0) {
6311 LOGE("pthread_cond_timedwait failed due to rc = %s",
6312 strerror(rc));
6313 break;
6314 }
6315 }
6316 }
6317 if (rc != 0) {
6318 mFlushPerf = false;
6319 pthread_mutex_unlock(&mMutex);
6320 return -ENODEV;
6321 }
6322
6323 LOGD("Received buffers, now safe to return them");
6324
6325 //make sure the channels handle flush
6326 //currently only required for the picture channel to release snapshot resources
6327 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6328 it != mStreamInfo.end(); it++) {
6329 QCamera3Channel *channel = (*it)->channel;
6330 if (channel) {
6331 rc = channel->flush();
6332 if (rc) {
6333 LOGE("Flushing the channels failed with error %d", rc);
6334 // even though the channel flush failed we need to continue and
6335 // return the buffers we have to the framework, however the return
6336 // value will be an error
6337 rc = -ENODEV;
6338 }
6339 }
6340 }
6341
6342 /* notify the frameworks and send errored results */
6343 rc = notifyErrorForPendingRequests();
6344 if (rc < 0) {
6345 LOGE("notifyErrorForPendingRequests failed");
6346 pthread_mutex_unlock(&mMutex);
6347 return rc;
6348 }
6349
6350 //unblock process_capture_request
6351 mPendingLiveRequest = 0;
6352 unblockRequestIfNecessary();
6353
6354 mFlushPerf = false;
6355 pthread_mutex_unlock(&mMutex);
6356 LOGD ("Flush Operation complete. rc = %d", rc);
6357 return rc;
6358}
6359
6360/*===========================================================================
6361 * FUNCTION : handleCameraDeviceError
6362 *
6363 * DESCRIPTION: This function calls internal flush and notifies the error to
6364 * framework and updates the state variable.
6365 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006366 * PARAMETERS :
6367 * @stopChannelImmediately : stop channels immediately without waiting for
6368 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006369 *
6370 * RETURN : NO_ERROR on Success
6371 * Error code on failure
6372 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006373int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006374{
6375 int32_t rc = NO_ERROR;
6376
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006377 {
6378 Mutex::Autolock lock(mFlushLock);
6379 pthread_mutex_lock(&mMutex);
6380 if (mState != ERROR) {
6381 //if mState != ERROR, nothing to be done
6382 pthread_mutex_unlock(&mMutex);
6383 return NO_ERROR;
6384 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006385 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006386
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006387 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006388 if (NO_ERROR != rc) {
6389 LOGE("internal flush to handle mState = ERROR failed");
6390 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006391
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006392 pthread_mutex_lock(&mMutex);
6393 mState = DEINIT;
6394 pthread_mutex_unlock(&mMutex);
6395 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006396
6397 camera3_notify_msg_t notify_msg;
6398 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6399 notify_msg.type = CAMERA3_MSG_ERROR;
6400 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6401 notify_msg.message.error.error_stream = NULL;
6402 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006403 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006404
6405 return rc;
6406}
6407
6408/*===========================================================================
6409 * FUNCTION : captureResultCb
6410 *
6411 * DESCRIPTION: Callback handler for all capture result
6412 * (streams, as well as metadata)
6413 *
6414 * PARAMETERS :
6415 * @metadata : metadata information
6416 * @buffer : actual gralloc buffer to be returned to frameworks.
6417 * NULL if metadata.
6418 *
6419 * RETURN : NONE
6420 *==========================================================================*/
6421void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6422 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6423{
6424 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006425 pthread_mutex_lock(&mMutex);
6426 uint8_t batchSize = mBatchSize;
6427 pthread_mutex_unlock(&mMutex);
6428 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006429 handleBatchMetadata(metadata_buf,
6430 true /* free_and_bufdone_meta_buf */);
6431 } else { /* mBatchSize = 0 */
6432 hdrPlusPerfLock(metadata_buf);
6433 pthread_mutex_lock(&mMutex);
6434 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006435 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006436 true /* last urgent frame of batch metadata */,
6437 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006438 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006439 pthread_mutex_unlock(&mMutex);
6440 }
6441 } else if (isInputBuffer) {
6442 pthread_mutex_lock(&mMutex);
6443 handleInputBufferWithLock(frame_number);
6444 pthread_mutex_unlock(&mMutex);
6445 } else {
6446 pthread_mutex_lock(&mMutex);
6447 handleBufferWithLock(buffer, frame_number);
6448 pthread_mutex_unlock(&mMutex);
6449 }
6450 return;
6451}
6452
6453/*===========================================================================
6454 * FUNCTION : getReprocessibleOutputStreamId
6455 *
6456 * DESCRIPTION: Get source output stream id for the input reprocess stream
6457 * based on size and format, which would be the largest
6458 * output stream if an input stream exists.
6459 *
6460 * PARAMETERS :
6461 * @id : return the stream id if found
6462 *
6463 * RETURN : int32_t type of status
6464 * NO_ERROR -- success
6465 * none-zero failure code
6466 *==========================================================================*/
6467int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6468{
6469 /* check if any output or bidirectional stream with the same size and format
6470 and return that stream */
6471 if ((mInputStreamInfo.dim.width > 0) &&
6472 (mInputStreamInfo.dim.height > 0)) {
6473 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6474 it != mStreamInfo.end(); it++) {
6475
6476 camera3_stream_t *stream = (*it)->stream;
6477 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6478 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6479 (stream->format == mInputStreamInfo.format)) {
6480 // Usage flag for an input stream and the source output stream
6481 // may be different.
6482 LOGD("Found reprocessible output stream! %p", *it);
6483 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6484 stream->usage, mInputStreamInfo.usage);
6485
6486 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6487 if (channel != NULL && channel->mStreams[0]) {
6488 id = channel->mStreams[0]->getMyServerID();
6489 return NO_ERROR;
6490 }
6491 }
6492 }
6493 } else {
6494 LOGD("No input stream, so no reprocessible output stream");
6495 }
6496 return NAME_NOT_FOUND;
6497}
6498
6499/*===========================================================================
6500 * FUNCTION : lookupFwkName
6501 *
6502 * DESCRIPTION: In case the enum is not same in fwk and backend
6503 * make sure the parameter is correctly propogated
6504 *
6505 * PARAMETERS :
6506 * @arr : map between the two enums
6507 * @len : len of the map
6508 * @hal_name : name of the hal_parm to map
6509 *
6510 * RETURN : int type of status
6511 * fwk_name -- success
6512 * none-zero failure code
6513 *==========================================================================*/
6514template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6515 size_t len, halType hal_name)
6516{
6517
6518 for (size_t i = 0; i < len; i++) {
6519 if (arr[i].hal_name == hal_name) {
6520 return arr[i].fwk_name;
6521 }
6522 }
6523
6524 /* Not able to find matching framework type is not necessarily
6525 * an error case. This happens when mm-camera supports more attributes
6526 * than the frameworks do */
6527 LOGH("Cannot find matching framework type");
6528 return NAME_NOT_FOUND;
6529}
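/* Illustrative use only; the map tables (e.g. EFFECT_MODES_MAP) and sizes are
 * defined elsewhere in this file, so treat the names below as an example:
 *
 *   int val = lookupFwkName(EFFECT_MODES_MAP,
 *           METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffectMode);
 *   if (val != NAME_NOT_FOUND) {
 *       uint8_t fwkEffect = (uint8_t)val;
 *       camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwkEffect, 1);
 *   }
 */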
6530
6531/*===========================================================================
6532 * FUNCTION : lookupHalName
6533 *
6534 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6535 * make sure the parameter is correctly propagated
6536 *
6537 * PARAMETERS :
6538 * @arr : map between the two enums
6539 * @len : len of the map
6540 * @fwk_name : name of the framework parameter to map
6541 *
6542 * RETURN : int32_t type of status
6543 * hal_name -- success
6544 * non-zero failure code
6545 *==========================================================================*/
6546template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6547 size_t len, fwkType fwk_name)
6548{
6549 for (size_t i = 0; i < len; i++) {
6550 if (arr[i].fwk_name == fwk_name) {
6551 return arr[i].hal_name;
6552 }
6553 }
6554
6555 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6556 return NAME_NOT_FOUND;
6557}
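/* Illustrative sketch of the mapping-table pattern shared by lookupFwkName()
 * and lookupHalName() (not part of the original source). The entry type and
 * table below are assumptions for illustration only; the real tables (e.g.
 * FLASH_MODES_MAP used later in this file) are defined elsewhere in the HAL
 * with matching fwk_name/hal_name members.
 *
 *     typedef struct {
 *         uint8_t fwk_name;  // framework enum, e.g. ANDROID_FLASH_MODE_TORCH
 *         int     hal_name;  // backend enum, e.g. CAM_FLASH_MODE_TORCH
 *     } ExampleMapEntry;
 *
 *     static const ExampleMapEntry kExampleMap[] = {
 *         { ANDROID_FLASH_MODE_OFF,   CAM_FLASH_MODE_OFF   },
 *         { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH },
 *     };
 *
 *     // backend -> framework
 *     int fwk = lookupFwkName(kExampleMap, 2, CAM_FLASH_MODE_TORCH);
 *     // framework -> backend (the inverse direction)
 *     int hal = lookupHalName(kExampleMap, 2, (uint8_t)ANDROID_FLASH_MODE_TORCH);
 *     // Both return NAME_NOT_FOUND when no entry matches.
 */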
6558
6559/*===========================================================================
6560 * FUNCTION : lookupProp
6561 *
6562 * DESCRIPTION: lookup a value by its name
6563 *
6564 * PARAMETERS :
6565 * @arr : map between the two enums
6566 * @len : size of the map
6567 * @name : name to be looked up
6568 *
6569 * RETURN : Value if found
6570 * CAM_CDS_MODE_MAX if not found
6571 *==========================================================================*/
6572template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6573 size_t len, const char *name)
6574{
6575 if (name) {
6576 for (size_t i = 0; i < len; i++) {
6577 if (!strcmp(arr[i].desc, name)) {
6578 return arr[i].val;
6579 }
6580 }
6581 }
6582 return CAM_CDS_MODE_MAX;
6583}
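/* Illustrative use of lookupProp() (not part of the original source): map a
 * system property string onto a CDS mode. The property key and table name are
 * assumptions for illustration; property_get() comes from
 * <cutils/properties.h>, which is already included by this file.
 *
 *     char prop[PROPERTY_VALUE_MAX];
 *     memset(prop, 0, sizeof(prop));
 *     property_get("persist.camera.example.CDS", prop, "Auto");
 *     cam_cds_mode_type_t cds = lookupProp(EXAMPLE_CDS_MAP,
 *             METADATA_MAP_SIZE(EXAMPLE_CDS_MAP), prop);
 *     if (CAM_CDS_MODE_MAX == cds) {
 *         // No matching entry for the property string; fall back to a default.
 *     }
 */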
6584
6585/*===========================================================================
6586 * FUNCTION   : translateFromHalMetadata
6587 *
6588 * DESCRIPTION: Translate the metadata obtained from the HAL/backend into the
 *              camera_metadata_t format expected by the framework
 *
6589 * PARAMETERS :
6590 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006591 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006592 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006593 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6594 * in a batch. Always true for non-batch mode.
 * @enableZsl: pointer to the ZSL enable flag for this request (may be NULL)
Thierry Strudel3d639192016-09-09 11:52:26 -07006595 *
6596 * RETURN : camera_metadata_t*
6597 * metadata in a format specified by fwk
6598 *==========================================================================*/
6599camera_metadata_t*
6600QCamera3HardwareInterface::translateFromHalMetadata(
6601 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006602 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006603 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006604 bool lastMetadataInBatch,
6605 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006606{
6607 CameraMetadata camMetadata;
6608 camera_metadata_t *resultMetadata;
6609
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006610 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006611 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6612 * Timestamp is needed because it's used for shutter notify calculation.
6613 * */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006614 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006615 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006616 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006617 }
6618
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006619 if (pendingRequest.jpegMetadata.entryCount())
6620 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006621
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006622 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6623 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6624 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6625 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6626 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006627 if (mBatchSize == 0) {
6628 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006629 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006630 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006631
Samuel Ha68ba5172016-12-15 18:41:12 -08006632 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6633 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006634 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006635 // DevCamDebug metadata translateFromHalMetadata AF
6636 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6637 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6638 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6639 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6642 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6643 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6644 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6647 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6648 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6649 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6650 }
6651 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6652 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6653 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6654 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6655 }
6656 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6657 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6658 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6659 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6660 }
6661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6662 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6663 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6664 *DevCamDebug_af_monitor_pdaf_target_pos;
6665 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6666 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6667 }
6668 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6669 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6670 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6671 *DevCamDebug_af_monitor_pdaf_confidence;
6672 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6673 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6674 }
6675 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6676 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6677 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6678 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6679 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6682 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6683 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6684 *DevCamDebug_af_monitor_tof_target_pos;
6685 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6686 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6687 }
6688 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6689 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6690 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6691 *DevCamDebug_af_monitor_tof_confidence;
6692 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6693 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6694 }
6695 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6696 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6697 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6698 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6699 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6700 }
6701 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6702 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6703 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6704 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6705 &fwk_DevCamDebug_af_monitor_type_select, 1);
6706 }
6707 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6708 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6709 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6710 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6711 &fwk_DevCamDebug_af_monitor_refocus, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6714 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6715 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6716 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6717 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6720 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6721 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6722 *DevCamDebug_af_search_pdaf_target_pos;
6723 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6724 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6725 }
6726 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6727 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6728 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6729 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6730 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6731 }
6732 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6733 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6734 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6735 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6736 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6737 }
6738 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6739 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6740 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6741 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6742 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6743 }
6744 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6745 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6746 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6747 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6748 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6751 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6752 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6753 *DevCamDebug_af_search_tof_target_pos;
6754 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6755 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6756 }
6757 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6758 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6759 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6760 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6761 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6762 }
6763 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6764 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6765 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6766 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6767 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6768 }
6769 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6770 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6771 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6772 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6773 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6774 }
6775 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6776 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6777 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6778 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6779 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6780 }
6781 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6782 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6783 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6784 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6785 &fwk_DevCamDebug_af_search_type_select, 1);
6786 }
6787 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6788 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6789 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6790 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6791 &fwk_DevCamDebug_af_search_next_pos, 1);
6792 }
6793 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6794 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6795 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6796 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6797 &fwk_DevCamDebug_af_search_target_pos, 1);
6798 }
6799 // DevCamDebug metadata translateFromHalMetadata AEC
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6801 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6802 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6803 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6804 }
6805 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6806 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6807 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6808 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6809 }
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6811 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6812 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6813 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6816 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6817 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6818 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6819 }
6820 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6821 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6822 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6823 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6824 }
6825 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6826 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6827 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6828 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6831 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6832 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6833 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6834 }
6835 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6836 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6837 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6838 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6839 }
Samuel Ha34229982017-02-17 13:51:11 -08006840 // DevCamDebug metadata translateFromHalMetadata zzHDR
6841 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6842 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6843 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6844 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6845 }
6846 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6847 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006848 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006849 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6850 }
6851 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6852 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6853 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6854 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6855 }
6856 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6857 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006858 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006859 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6860 }
6861 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6862 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6863 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6864 *DevCamDebug_aec_hdr_sensitivity_ratio;
6865 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6866 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6867 }
6868 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6869 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6870 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6871 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6872 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6873 }
6874 // DevCamDebug metadata translateFromHalMetadata ADRC
6875 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6876 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6877 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6878 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6879 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6880 }
6881 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6882 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6883 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6884 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6885 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6886 }
6887 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6888 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6889 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6890 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6891 }
6892 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6893 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6894 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6895 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6896 }
6897 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6898 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6899 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6900 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6901 }
6902 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6903 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6904 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6905 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6906 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006907 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6908 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6909 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6910 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6911 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6912 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6913 }
6914 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6915 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6916 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6917 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6918 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6919 }
6920 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6921 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6922 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6923 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6924 &fwk_DevCamDebug_aec_subject_motion, 1);
6925 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006926 // DevCamDebug metadata translateFromHalMetadata AWB
6927 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6928 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6929 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6930 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6931 }
6932 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6933 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6934 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6935 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6936 }
6937 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6938 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6939 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6940 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6941 }
6942 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6943 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6944 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6945 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6946 }
6947 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6948 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6949 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6950 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6951 }
6952 }
6953 // atrace_end(ATRACE_TAG_ALWAYS);
6954
Thierry Strudel3d639192016-09-09 11:52:26 -07006955 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6956 int64_t fwk_frame_number = *frame_number;
6957 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6958 }
6959
6960 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6961 int32_t fps_range[2];
6962 fps_range[0] = (int32_t)float_range->min_fps;
6963 fps_range[1] = (int32_t)float_range->max_fps;
6964 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6965 fps_range, 2);
6966 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6967 fps_range[0], fps_range[1]);
6968 }
6969
6970 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6971 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6972 }
6973
6974 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6975 int val = lookupFwkName(SCENE_MODES_MAP,
6976 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6977 *sceneMode);
6978 if (NAME_NOT_FOUND != val) {
6979 uint8_t fwkSceneMode = (uint8_t)val;
6980 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6981 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6982 fwkSceneMode);
6983 }
6984 }
6985
6986 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6987 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6988 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6989 }
6990
6991 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6992 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6993 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6994 }
6995
6996 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6997 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6998 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6999 }
7000
7001 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7002 CAM_INTF_META_EDGE_MODE, metadata) {
7003 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7004 }
7005
7006 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7007 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7008 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7009 }
7010
7011 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7012 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7013 }
7014
7015 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7016 if (0 <= *flashState) {
7017 uint8_t fwk_flashState = (uint8_t) *flashState;
7018 if (!gCamCapability[mCameraId]->flash_available) {
7019 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7020 }
7021 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7022 }
7023 }
7024
7025 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7026 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7027 if (NAME_NOT_FOUND != val) {
7028 uint8_t fwk_flashMode = (uint8_t)val;
7029 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7030 }
7031 }
7032
7033 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7034 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7035 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7036 }
7037
7038 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7039 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7040 }
7041
7042 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7043 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7044 }
7045
7046 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7047 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7048 }
7049
7050 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7051 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7052 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7053 }
7054
7055 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7056 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7057 LOGD("fwk_videoStab = %d", fwk_videoStab);
7058 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7059 } else {
7060 // Regardless of whether video stabilization is supported or not, CTS expects the
7061 // EIS result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7062 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7063 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007064 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007065 }
7066
7067 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7068 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7069 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7070 }
7071
7072 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7073 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7074 }
7075
Thierry Strudel3d639192016-09-09 11:52:26 -07007076 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7077 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007078 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007079
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007080 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7081 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007082
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007083 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007084 blackLevelAppliedPattern->cam_black_level[0],
7085 blackLevelAppliedPattern->cam_black_level[1],
7086 blackLevelAppliedPattern->cam_black_level[2],
7087 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007088 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7089 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007090
7091#ifndef USE_HAL_3_3
7092 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307093 // Need to convert the internal 14-bit black level to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007094 // depth space, i.e. divide by 2^(14-10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307095 fwk_blackLevelInd[0] /= 16.0;
7096 fwk_blackLevelInd[1] /= 16.0;
7097 fwk_blackLevelInd[2] /= 16.0;
7098 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007099 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7100 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007101#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007102 }
7103
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007104#ifndef USE_HAL_3_3
7105 // Fixed white level is used by the ISP/sensor
7106 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7107 &gCamCapability[mCameraId]->white_level, 1);
7108#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007109
7110 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7111 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7112 int32_t scalerCropRegion[4];
7113 scalerCropRegion[0] = hScalerCropRegion->left;
7114 scalerCropRegion[1] = hScalerCropRegion->top;
7115 scalerCropRegion[2] = hScalerCropRegion->width;
7116 scalerCropRegion[3] = hScalerCropRegion->height;
7117
7118 // Adjust crop region from sensor output coordinate system to active
7119 // array coordinate system.
7120 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7121 scalerCropRegion[2], scalerCropRegion[3]);
7122
7123 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7124 }
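    /* Illustration with hypothetical numbers (not from the original source):
     * if the sensor output is 2104x1560 and the active array is 4208x3120,
     * toActiveArray() above roughly doubles each coordinate, so a crop of
     * (left=100, top=100, w=1000, h=750) would be reported to the framework
     * as approximately (200, 200, 2000, 1500). */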
7125
7126 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7127 LOGD("sensorExpTime = %lld", *sensorExpTime);
7128 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7129 }
7130
7131 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7132 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7133 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7134 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7135 }
7136
7137 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7138 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7139 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7140 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7141 sensorRollingShutterSkew, 1);
7142 }
7143
7144 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7145 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7146 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7147
7148 //calculate the noise profile based on sensitivity
7149 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7150 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7151 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7152 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7153 noise_profile[i] = noise_profile_S;
7154 noise_profile[i+1] = noise_profile_O;
7155 }
7156 LOGD("noise model entry (S, O) is (%f, %f)",
7157 noise_profile_S, noise_profile_O);
7158 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7159 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7160 }
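    /* Note on the noise profile published above (explanatory sketch, not part
     * of the original source): each (S, O) pair models the sensor noise as
     * variance(signal) ~= S * signal + O, and the array interleaves one pair
     * per color channel: [S0, O0, S1, O1, ...]. A framework-side consumer
     * would read it roughly as follows:
     *
     *     camera_metadata_ro_entry_t e = result.find(ANDROID_SENSOR_NOISE_PROFILE);
     *     for (size_t ch = 0; ch + 1 < e.count; ch += 2) {
     *         double S = e.data.d[ch];      // slope for this channel
     *         double O = e.data.d[ch + 1];  // offset for this channel
     *     }
     */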
7161
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007162#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007163 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007164 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007165 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007166 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007167 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7168 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7169 }
7170 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007171#endif
7172
Thierry Strudel3d639192016-09-09 11:52:26 -07007173 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7174 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7175 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7176 }
7177
7178 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7179 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7180 *faceDetectMode);
7181 if (NAME_NOT_FOUND != val) {
7182 uint8_t fwk_faceDetectMode = (uint8_t)val;
7183 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7184
7185 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7186 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7187 CAM_INTF_META_FACE_DETECTION, metadata) {
7188 uint8_t numFaces = MIN(
7189 faceDetectionInfo->num_faces_detected, MAX_ROI);
7190 int32_t faceIds[MAX_ROI];
7191 uint8_t faceScores[MAX_ROI];
7192 int32_t faceRectangles[MAX_ROI * 4];
7193 int32_t faceLandmarks[MAX_ROI * 6];
7194 size_t j = 0, k = 0;
7195
7196 for (size_t i = 0; i < numFaces; i++) {
7197 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7198 // Map the face boundary from the sensor output coordinate system to the
7199 // active array coordinate system.
7200 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7201 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7202 rect.width, rect.height);
7203
7204 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7205 faceRectangles+j, -1);
7206
Jason Lee8ce36fa2017-04-19 19:40:37 -07007207 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7208 "bottom-right (%d, %d)",
7209 faceDetectionInfo->frame_id, i,
7210 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7211 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7212
Thierry Strudel3d639192016-09-09 11:52:26 -07007213 j+= 4;
7214 }
7215 if (numFaces <= 0) {
7216 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7217 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7218 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7219 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7220 }
7221
7222 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7223 numFaces);
7224 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7225 faceRectangles, numFaces * 4U);
7226 if (fwk_faceDetectMode ==
7227 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7228 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7229 CAM_INTF_META_FACE_LANDMARK, metadata) {
7230
7231 for (size_t i = 0; i < numFaces; i++) {
7232 // Map the landmark coordinates from the sensor output coordinate system
7233 // to the active array coordinate system.
7234 mCropRegionMapper.toActiveArray(
7235 landmarks->face_landmarks[i].left_eye_center.x,
7236 landmarks->face_landmarks[i].left_eye_center.y);
7237 mCropRegionMapper.toActiveArray(
7238 landmarks->face_landmarks[i].right_eye_center.x,
7239 landmarks->face_landmarks[i].right_eye_center.y);
7240 mCropRegionMapper.toActiveArray(
7241 landmarks->face_landmarks[i].mouth_center.x,
7242 landmarks->face_landmarks[i].mouth_center.y);
7243
7244 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007245
7246 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7247 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7248 faceDetectionInfo->frame_id, i,
7249 faceLandmarks[k + LEFT_EYE_X],
7250 faceLandmarks[k + LEFT_EYE_Y],
7251 faceLandmarks[k + RIGHT_EYE_X],
7252 faceLandmarks[k + RIGHT_EYE_Y],
7253 faceLandmarks[k + MOUTH_X],
7254 faceLandmarks[k + MOUTH_Y]);
7255
Thierry Strudel04e026f2016-10-10 11:27:36 -07007256 k+= TOTAL_LANDMARK_INDICES;
7257 }
7258 } else {
7259 for (size_t i = 0; i < numFaces; i++) {
7260 setInvalidLandmarks(faceLandmarks+k);
7261 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007262 }
7263 }
7264
Jason Lee49619db2017-04-13 12:07:22 -07007265 for (size_t i = 0; i < numFaces; i++) {
7266 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7267
7268 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7269 faceDetectionInfo->frame_id, i, faceIds[i]);
7270 }
7271
Thierry Strudel3d639192016-09-09 11:52:26 -07007272 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7273 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7274 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007275 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007276 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7277 CAM_INTF_META_FACE_BLINK, metadata) {
7278 uint8_t detected[MAX_ROI];
7279 uint8_t degree[MAX_ROI * 2];
7280 for (size_t i = 0; i < numFaces; i++) {
7281 detected[i] = blinks->blink[i].blink_detected;
7282 degree[2 * i] = blinks->blink[i].left_blink;
7283 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007284
Jason Lee49619db2017-04-13 12:07:22 -07007285 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7286 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7287 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7288 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007289 }
7290 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7291 detected, numFaces);
7292 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7293 degree, numFaces * 2);
7294 }
7295 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7296 CAM_INTF_META_FACE_SMILE, metadata) {
7297 uint8_t degree[MAX_ROI];
7298 uint8_t confidence[MAX_ROI];
7299 for (size_t i = 0; i < numFaces; i++) {
7300 degree[i] = smiles->smile[i].smile_degree;
7301 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007302
Jason Lee49619db2017-04-13 12:07:22 -07007303 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7304 "smile_degree=%d, smile_score=%d",
7305 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007306 }
7307 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7308 degree, numFaces);
7309 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7310 confidence, numFaces);
7311 }
7312 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7313 CAM_INTF_META_FACE_GAZE, metadata) {
7314 int8_t angle[MAX_ROI];
7315 int32_t direction[MAX_ROI * 3];
7316 int8_t degree[MAX_ROI * 2];
7317 for (size_t i = 0; i < numFaces; i++) {
7318 angle[i] = gazes->gaze[i].gaze_angle;
7319 direction[3 * i] = gazes->gaze[i].updown_dir;
7320 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7321 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7322 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7323 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007324
7325 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7326 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7327 "left_right_gaze=%d, top_bottom_gaze=%d",
7328 faceDetectionInfo->frame_id, i, angle[i],
7329 direction[3 * i], direction[3 * i + 1],
7330 direction[3 * i + 2],
7331 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007332 }
7333 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7334 (uint8_t *)angle, numFaces);
7335 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7336 direction, numFaces * 3);
7337 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7338 (uint8_t *)degree, numFaces * 2);
7339 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007340 }
7341 }
7342 }
7343 }
7344
7345 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7346 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007347 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007348 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007349 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007350
Shuzhen Wang14415f52016-11-16 18:26:18 -08007351 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7352 histogramBins = *histBins;
7353 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7354 }
7355
7356 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007357 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7358 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007359 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007360
7361 switch (stats_data->type) {
7362 case CAM_HISTOGRAM_TYPE_BAYER:
7363 switch (stats_data->bayer_stats.data_type) {
7364 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007365 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7366 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007367 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007368 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7369 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007370 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007371 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7372 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007373 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007374 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007375 case CAM_STATS_CHANNEL_R:
7376 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007377 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7378 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007379 }
7380 break;
7381 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007382 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007383 break;
7384 }
7385
Shuzhen Wang14415f52016-11-16 18:26:18 -08007386 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007387 }
7388 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007389 }
7390
7391 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7392 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7393 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7394 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7395 }
7396
7397 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7398 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7399 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7400 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7401 }
7402
7403 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7404 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7405 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7406 CAM_MAX_SHADING_MAP_HEIGHT);
7407 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7408 CAM_MAX_SHADING_MAP_WIDTH);
7409 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7410 lensShadingMap->lens_shading, 4U * map_width * map_height);
7411 }
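    // The lens shading map carries four gain samples (R, G_even, G_odd, B) per
    // grid cell, which is why the entry count above is 4 * map_width * map_height.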
7412
7413 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7414 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7415 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7416 }
7417
7418 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7419 // Populate CAM_INTF_META_TONEMAP_CURVES
7420 /* ch0 = G, ch1 = B, ch2 = R */
7421 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7422 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7423 tonemap->tonemap_points_cnt,
7424 CAM_MAX_TONEMAP_CURVE_SIZE);
7425 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7426 }
7427
7428 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7429 &tonemap->curves[0].tonemap_points[0][0],
7430 tonemap->tonemap_points_cnt * 2);
7431
7432 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7433 &tonemap->curves[1].tonemap_points[0][0],
7434 tonemap->tonemap_points_cnt * 2);
7435
7436 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7437 &tonemap->curves[2].tonemap_points[0][0],
7438 tonemap->tonemap_points_cnt * 2);
7439 }
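    // Each tonemap curve above is published as tonemap_points_cnt (input, output)
    // pairs, hence the count of tonemap_points_cnt * 2 floats per color channel.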
7440
7441 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7442 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7443 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7444 CC_GAIN_MAX);
7445 }
7446
7447 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7448 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7449 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7450 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7451 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7452 }
7453
7454 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7455 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7456 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7457 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7458 toneCurve->tonemap_points_cnt,
7459 CAM_MAX_TONEMAP_CURVE_SIZE);
7460 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7461 }
7462 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7463 (float*)toneCurve->curve.tonemap_points,
7464 toneCurve->tonemap_points_cnt * 2);
7465 }
7466
7467 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7468 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7469 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7470 predColorCorrectionGains->gains, 4);
7471 }
7472
7473 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7474 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7475 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7476 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7477 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7478 }
7479
7480 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7481 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7482 }
7483
7484 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7485 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7486 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7487 }
7488
7489 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7490 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7491 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7492 }
7493
7494 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7495 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7496 *effectMode);
7497 if (NAME_NOT_FOUND != val) {
7498 uint8_t fwk_effectMode = (uint8_t)val;
7499 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7500 }
7501 }
7502
7503 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7504 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7505 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7506 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7507 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7508 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7509 }
7510 int32_t fwk_testPatternData[4];
7511 fwk_testPatternData[0] = testPatternData->r;
7512 fwk_testPatternData[3] = testPatternData->b;
7513 switch (gCamCapability[mCameraId]->color_arrangement) {
7514 case CAM_FILTER_ARRANGEMENT_RGGB:
7515 case CAM_FILTER_ARRANGEMENT_GRBG:
7516 fwk_testPatternData[1] = testPatternData->gr;
7517 fwk_testPatternData[2] = testPatternData->gb;
7518 break;
7519 case CAM_FILTER_ARRANGEMENT_GBRG:
7520 case CAM_FILTER_ARRANGEMENT_BGGR:
7521 fwk_testPatternData[2] = testPatternData->gr;
7522 fwk_testPatternData[1] = testPatternData->gb;
7523 break;
7524 default:
7525 LOGE("color arrangement %d is not supported",
7526 gCamCapability[mCameraId]->color_arrangement);
7527 break;
7528 }
7529 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7530 }
7531
7532 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7533 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7534 }
7535
7536 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7537 String8 str((const char *)gps_methods);
7538 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7539 }
7540
7541 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7542 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7543 }
7544
7545 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7546 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7547 }
7548
7549 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7550 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7551 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7552 }
7553
7554 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7555 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7556 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7557 }
7558
7559 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7560 int32_t fwk_thumb_size[2];
7561 fwk_thumb_size[0] = thumb_size->width;
7562 fwk_thumb_size[1] = thumb_size->height;
7563 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7564 }
7565
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007566 // Skip reprocess metadata if there is no input stream.
7567 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7568 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7569 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7570 privateData,
7571 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7572 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007573 }
7574
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007575 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007576 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007577 meteringMode, 1);
7578 }
7579
Thierry Strudel54dc9782017-02-15 12:12:10 -08007580 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7581 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7582 LOGD("hdr_scene_data: %d %f\n",
7583 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7584 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7585 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7586 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7587 &isHdr, 1);
7588 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7589 &isHdrConfidence, 1);
7590 }
7591
7592
7593
Thierry Strudel3d639192016-09-09 11:52:26 -07007594 if (metadata->is_tuning_params_valid) {
7595 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7596 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7597 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7598
7599
7600 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7601 sizeof(uint32_t));
7602 data += sizeof(uint32_t);
7603
7604 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7605 sizeof(uint32_t));
7606 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7607 data += sizeof(uint32_t);
7608
7609 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7610 sizeof(uint32_t));
7611 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7612 data += sizeof(uint32_t);
7613
7614 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7615 sizeof(uint32_t));
7616 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7617 data += sizeof(uint32_t);
7618
7619 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7620 sizeof(uint32_t));
7621 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7622 data += sizeof(uint32_t);
7623
7624 metadata->tuning_params.tuning_mod3_data_size = 0;
7625 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7626 sizeof(uint32_t));
7627 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7628 data += sizeof(uint32_t);
7629
7630 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7631 TUNING_SENSOR_DATA_MAX);
7632 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7633 count);
7634 data += count;
7635
7636 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7637 TUNING_VFE_DATA_MAX);
7638 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7639 count);
7640 data += count;
7641
7642 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7643 TUNING_CPP_DATA_MAX);
7644 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7645 count);
7646 data += count;
7647
7648 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7649 TUNING_CAC_DATA_MAX);
7650 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7651 count);
7652 data += count;
7653
7654 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7655 (int32_t *)(void *)tuning_meta_data_blob,
7656 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7657 }
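    /* Summary of the tuning blob layout assembled above (derived from the code
     * in this block): a header of six uint32 values
     *     [version][sensor_size][vfe_size][cpp_size][cac_size][mod3_size(=0)]
     * followed by the sensor, VFE, CPP and CAC tuning payloads back to back,
     * each clamped to its TUNING_*_DATA_MAX limit. The blob is published as an
     * int32 array, so the total length is expressed in 4-byte words. */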
7658
7659 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7660 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7661 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7662 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7663 NEUTRAL_COL_POINTS);
7664 }
7665
7666 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7667 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7668 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7669 }
7670
7671 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7672 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7673 // Adjust crop region from sensor output coordinate system to active
7674 // array coordinate system.
7675 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7676 hAeRegions->rect.width, hAeRegions->rect.height);
7677
7678 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7679 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7680 REGIONS_TUPLE_COUNT);
7681 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7682 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7683 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7684 hAeRegions->rect.height);
7685 }
7686
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007687 if (!pendingRequest.focusStateSent) {
7688 if (pendingRequest.focusStateValid) {
7689 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7690 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007691 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007692 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7693 uint8_t fwk_afState = (uint8_t) *afState;
7694 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7695 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7696 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007697 }
7698 }
7699
Thierry Strudel3d639192016-09-09 11:52:26 -07007700 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7701 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7702 }
7703
7704 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7705 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7706 }
7707
7708 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7709 uint8_t fwk_lensState = *lensState;
7710 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7711 }
7712
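    // Antibanding: collapse the 50Hz/60Hz auto variants into plain AUTO before mapping
    // to the framework's ANDROID_CONTROL_AE_ANTIBANDING_MODE enum.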
Thierry Strudel3d639192016-09-09 11:52:26 -07007713 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007714 uint32_t ab_mode = *hal_ab_mode;
7715 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7716 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7717 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7718 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007719 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007720 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007721 if (NAME_NOT_FOUND != val) {
7722 uint8_t fwk_ab_mode = (uint8_t)val;
7723 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7724 }
7725 }
7726
7727 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7728 int val = lookupFwkName(SCENE_MODES_MAP,
7729 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7730 if (NAME_NOT_FOUND != val) {
7731 uint8_t fwkBestshotMode = (uint8_t)val;
7732 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7733 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7734 } else {
7735 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7736 }
7737 }
7738
7739 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7740 uint8_t fwk_mode = (uint8_t) *mode;
7741 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7742 }
7743
7744    /* Constant metadata values to be updated */
7745 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7746 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7747
7748 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7749 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7750
7751 int32_t hotPixelMap[2];
7752 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
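    // An empty hot pixel map (count 0) is published; per-frame hot pixel coordinates
    // are not reported by this HAL.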
7753
7754 // CDS
7755 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7756 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7757 }
7758
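    // Video HDR: map the sensor HDR state to the vendor tag and track ON/OFF transitions
    // in mCurrFeatureState so feature toggles show up in the profiling logs.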
Thierry Strudel04e026f2016-10-10 11:27:36 -07007759 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7760 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007761 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007762 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7763 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7764 } else {
7765 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7766 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007767
7768 if(fwk_hdr != curr_hdr_state) {
7769 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7770 if(fwk_hdr)
7771 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7772 else
7773 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7774 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007775 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7776 }
7777
Thierry Strudel54dc9782017-02-15 12:12:10 -08007778 //binning correction
7779 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7780 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7781 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7782 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7783 }
7784
Thierry Strudel04e026f2016-10-10 11:27:36 -07007785 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007786 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007787 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7788 int8_t is_ir_on = 0;
7789
7790 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7791 if(is_ir_on != curr_ir_state) {
7792 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7793 if(is_ir_on)
7794 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7795 else
7796 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7797 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007798 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007799 }
7800
Thierry Strudel269c81a2016-10-12 12:13:59 -07007801 // AEC SPEED
7802 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7803 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7804 }
7805
7806 // AWB SPEED
7807 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7808 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7809 }
7810
Thierry Strudel3d639192016-09-09 11:52:26 -07007811 // TNR
7812 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7813 uint8_t tnr_enable = tnr->denoise_enable;
7814 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007815 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7816 int8_t is_tnr_on = 0;
7817
7818 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7819 if(is_tnr_on != curr_tnr_state) {
7820 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7821 if(is_tnr_on)
7822 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7823 else
7824 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7825 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007826
7827 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7828 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7829 }
7830
7831 // Reprocess crop data
7832 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7833 uint8_t cnt = crop_data->num_of_streams;
7834 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7835 // mm-qcamera-daemon only posts crop_data for streams
7836                // not linked to pproc, so the absence of valid crop metadata
7837                // is not necessarily an error case.
7838 LOGD("No valid crop metadata entries");
7839 } else {
7840 uint32_t reproc_stream_id;
7841 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7842 LOGD("No reprocessible stream found, ignore crop data");
7843 } else {
7844 int rc = NO_ERROR;
7845 Vector<int32_t> roi_map;
7846 int32_t *crop = new int32_t[cnt*4];
7847 if (NULL == crop) {
7848 rc = NO_MEMORY;
7849 }
7850 if (NO_ERROR == rc) {
7851 int32_t streams_found = 0;
7852 for (size_t i = 0; i < cnt; i++) {
7853 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7854 if (pprocDone) {
7855 // HAL already does internal reprocessing,
7856 // either via reprocessing before JPEG encoding,
7857 // or offline postprocessing for pproc bypass case.
7858 crop[0] = 0;
7859 crop[1] = 0;
7860 crop[2] = mInputStreamInfo.dim.width;
7861 crop[3] = mInputStreamInfo.dim.height;
7862 } else {
7863 crop[0] = crop_data->crop_info[i].crop.left;
7864 crop[1] = crop_data->crop_info[i].crop.top;
7865 crop[2] = crop_data->crop_info[i].crop.width;
7866 crop[3] = crop_data->crop_info[i].crop.height;
7867 }
7868 roi_map.add(crop_data->crop_info[i].roi_map.left);
7869 roi_map.add(crop_data->crop_info[i].roi_map.top);
7870 roi_map.add(crop_data->crop_info[i].roi_map.width);
7871 roi_map.add(crop_data->crop_info[i].roi_map.height);
7872 streams_found++;
7873 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7874 crop[0], crop[1], crop[2], crop[3]);
7875 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7876 crop_data->crop_info[i].roi_map.left,
7877 crop_data->crop_info[i].roi_map.top,
7878 crop_data->crop_info[i].roi_map.width,
7879 crop_data->crop_info[i].roi_map.height);
7880 break;
7881
7882 }
7883 }
7884 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7885 &streams_found, 1);
7886 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7887 crop, (size_t)(streams_found * 4));
7888 if (roi_map.array()) {
7889 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7890 roi_map.array(), roi_map.size());
7891 }
7892 }
7893 if (crop) {
7894 delete [] crop;
7895 }
7896 }
7897 }
7898 }
7899
7900 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7901        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7902        // so hardcode the CAC result to OFF mode.
7903 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7904 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7905 } else {
7906 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7907 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7908 *cacMode);
7909 if (NAME_NOT_FOUND != val) {
7910 uint8_t resultCacMode = (uint8_t)val;
7911 // check whether CAC result from CB is equal to Framework set CAC mode
7912                // Check whether the CAC result from the callback equals the framework-set CAC mode.
7913                // If not, report the CAC mode that came in the corresponding request.
7914 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007915 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007916 //Check if CAC is disabled by property
7917 if (m_cacModeDisabled) {
7918 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7919 }
7920
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007921 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007922 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7923 } else {
7924 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7925 }
7926 }
7927 }
7928
7929 // Post blob of cam_cds_data through vendor tag.
7930 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7931 uint8_t cnt = cdsInfo->num_of_streams;
7932 cam_cds_data_t cdsDataOverride;
7933 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7934 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7935 cdsDataOverride.num_of_streams = 1;
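        // Only the CDS enable flag of the reprocessible stream is copied into the override,
        // so the blob posted to the vendor tag always carries a single stream entry.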
7936 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7937 uint32_t reproc_stream_id;
7938 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7939 LOGD("No reprocessible stream found, ignore cds data");
7940 } else {
7941 for (size_t i = 0; i < cnt; i++) {
7942 if (cdsInfo->cds_info[i].stream_id ==
7943 reproc_stream_id) {
7944 cdsDataOverride.cds_info[0].cds_enable =
7945 cdsInfo->cds_info[i].cds_enable;
7946 break;
7947 }
7948 }
7949 }
7950 } else {
7951 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7952 }
7953 camMetadata.update(QCAMERA3_CDS_INFO,
7954 (uint8_t *)&cdsDataOverride,
7955 sizeof(cam_cds_data_t));
7956 }
7957
7958 // Ldaf calibration data
7959 if (!mLdafCalibExist) {
7960 IF_META_AVAILABLE(uint32_t, ldafCalib,
7961 CAM_INTF_META_LDAF_EXIF, metadata) {
7962 mLdafCalibExist = true;
7963 mLdafCalib[0] = ldafCalib[0];
7964 mLdafCalib[1] = ldafCalib[1];
7965 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7966 ldafCalib[0], ldafCalib[1]);
7967 }
7968 }
7969
Thierry Strudel54dc9782017-02-15 12:12:10 -08007970 // EXIF debug data through vendor tag
7971 /*
7972 * Mobicat Mask can assume 3 values:
7973 * 1 refers to Mobicat data,
7974 * 2 refers to Stats Debug and Exif Debug Data
7975 * 3 refers to Mobicat and Stats Debug Data
7976 * We want to make sure that we are sending Exif debug data
7977 * only when Mobicat Mask is 2.
7978 */
7979 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7980 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7981 (uint8_t *)(void *)mExifParams.debug_params,
7982 sizeof(mm_jpeg_debug_exif_params_t));
7983 }
7984
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007985 // Reprocess and DDM debug data through vendor tag
7986 cam_reprocess_info_t repro_info;
7987 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
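    // Collect snapshot-time crop info (sensor/CAMIF/ISP/CPP), focal length ratio, flip,
    // rotation, AF ROI and dynamic feature mask into a single blob for the reprocess/DDM
    // vendor tag below.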
Thierry Strudel3d639192016-09-09 11:52:26 -07007988 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7989 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007990 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007991 }
7992 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7993 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007994 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007995 }
7996 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7997 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007998 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007999 }
8000 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8001 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008002 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008003 }
8004 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8005 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008006 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008007 }
8008 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008009 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008010 }
8011 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8012 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008013 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008014 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008015 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8016 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8017 }
8018 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8019 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8020 }
8021 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8022 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008023
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008024 // INSTANT AEC MODE
8025 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8026 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8027 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8028 }
8029
Shuzhen Wange763e802016-03-31 10:24:29 -07008030 // AF scene change
8031 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8032 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8033 }
8034
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008035 // Enable ZSL
8036 if (enableZsl != nullptr) {
8037 uint8_t value = *enableZsl ?
8038 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8039 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8040 }
8041
Xu Han821ea9c2017-05-23 09:00:40 -07008042 // OIS Data
8043 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8044 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8045 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8046 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8047 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8048 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8049 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8050 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8051 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8052 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8053 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8054 }
8055
Thierry Strudel3d639192016-09-09 11:52:26 -07008056 resultMetadata = camMetadata.release();
8057 return resultMetadata;
8058}
8059
8060/*===========================================================================
8061 * FUNCTION : saveExifParams
8062 *
8063 * DESCRIPTION: cache 3A/stats EXIF debug parameters from the metadata callback in mExifParams
8064 *
8065 * PARAMETERS :
8066 * @metadata : metadata information from callback
8067 *
8068 * RETURN : none
8069 *
8070 *==========================================================================*/
8071void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8072{
8073 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8074 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8075 if (mExifParams.debug_params) {
8076 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8077 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8078 }
8079 }
8080 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8081 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8082 if (mExifParams.debug_params) {
8083 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8084 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8085 }
8086 }
8087 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8088 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8089 if (mExifParams.debug_params) {
8090 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8091 mExifParams.debug_params->af_debug_params_valid = TRUE;
8092 }
8093 }
8094 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8095 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8096 if (mExifParams.debug_params) {
8097 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8098 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8099 }
8100 }
8101 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8102 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8103 if (mExifParams.debug_params) {
8104 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8105 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8106 }
8107 }
8108 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8109 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8110 if (mExifParams.debug_params) {
8111 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8112 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8113 }
8114 }
8115 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8116 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8117 if (mExifParams.debug_params) {
8118 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8119 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8120 }
8121 }
8122 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8123 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8124 if (mExifParams.debug_params) {
8125 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8126 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8127 }
8128 }
8129}
8130
8131/*===========================================================================
8132 * FUNCTION : get3AExifParams
8133 *
8134 * DESCRIPTION: return the cached EXIF parameters (including 3A debug data)
8135 *
8136 * PARAMETERS : none
8137 *
8138 *
8139 * RETURN : mm_jpeg_exif_params_t
8140 *
8141 *==========================================================================*/
8142mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8143{
8144 return mExifParams;
8145}
8146
8147/*===========================================================================
8148 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8149 *
8150 * DESCRIPTION: translate urgent (partial result) metadata from the callback into
 *              framework result metadata
8151 *
8152 * PARAMETERS :
8153 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008154 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8155 * urgent metadata in a batch. Always true for
8156 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008157 * @frame_number : frame number for this urgent metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07008158 *
8159 * RETURN : camera_metadata_t*
8160 * metadata in a format specified by fwk
8161 *==========================================================================*/
8162camera_metadata_t*
8163QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008164 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
8165 uint32_t frame_number)
Thierry Strudel3d639192016-09-09 11:52:26 -07008166{
8167 CameraMetadata camMetadata;
8168 camera_metadata_t *resultMetadata;
8169
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008170 if (!lastUrgentMetadataInBatch) {
8171 /* In batch mode, use empty metadata if this is not the last in batch
8172 */
8173 resultMetadata = allocate_camera_metadata(0, 0);
8174 return resultMetadata;
8175 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008176
8177 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8178 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8179 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8180 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8181 }
8182
8183 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8184 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8185 &aecTrigger->trigger, 1);
8186 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8187 &aecTrigger->trigger_id, 1);
8188 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8189 aecTrigger->trigger);
8190 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8191 aecTrigger->trigger_id);
8192 }
8193
8194 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8195 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8196 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8197 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8198 }
8199
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008200 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8201 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8202 if (NAME_NOT_FOUND != val) {
8203 uint8_t fwkAfMode = (uint8_t)val;
8204 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8205 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8206 } else {
8207 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8208 val);
8209 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008210 }
8211
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008212 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8213 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8214 af_trigger->trigger);
8215 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8216 af_trigger->trigger_id);
8217
8218 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8219 mAfTrigger = *af_trigger;
8220 uint32_t fwk_AfState = (uint32_t) *afState;
8221
8222 // If this is the result for a new trigger, check if there is new early
8223 // af state. If there is, use the last af state for all results
8224 // preceding current partial frame number.
8225 for (auto & pendingRequest : mPendingRequestsList) {
8226 if (pendingRequest.frame_number < frame_number) {
8227 pendingRequest.focusStateValid = true;
8228 pendingRequest.focusState = fwk_AfState;
8229 } else if (pendingRequest.frame_number == frame_number) {
8230 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8231 // Check if early AF state for trigger exists. If yes, send AF state as
8232 // partial result for better latency.
8233 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8234 pendingRequest.focusStateSent = true;
8235 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8236 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8237 frame_number, fwkEarlyAfState);
8238 }
8239 }
8240 }
8241 }
8242 }
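    // Always report the most recently cached AF trigger (mAfTrigger), even when this
    // metadata buffer does not carry a new trigger.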
8243 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8244 &mAfTrigger.trigger, 1);
8245 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8246
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008247 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8248 /*af regions*/
8249 int32_t afRegions[REGIONS_TUPLE_COUNT];
8250        // Adjust the AF region from the sensor output coordinate system to the
8251        // active array coordinate system.
8252 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8253 hAfRegions->rect.width, hAfRegions->rect.height);
8254
8255 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8256 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8257 REGIONS_TUPLE_COUNT);
8258 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8259 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8260 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8261 hAfRegions->rect.height);
8262 }
8263
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008264 // AF region confidence
8265 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8266 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8267 }
8268
Thierry Strudel3d639192016-09-09 11:52:26 -07008269 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8270 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8271 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8272 if (NAME_NOT_FOUND != val) {
8273 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8274 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8275 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8276 } else {
8277 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8278 }
8279 }
8280
8281 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8282 uint32_t aeMode = CAM_AE_MODE_MAX;
8283 int32_t flashMode = CAM_FLASH_MODE_MAX;
8284 int32_t redeye = -1;
8285 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8286 aeMode = *pAeMode;
8287 }
8288 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8289 flashMode = *pFlashMode;
8290 }
8291 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8292 redeye = *pRedeye;
8293 }
8294
8295 if (1 == redeye) {
8296 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8297 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8298 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8299 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8300 flashMode);
8301 if (NAME_NOT_FOUND != val) {
8302 fwk_aeMode = (uint8_t)val;
8303 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8304 } else {
8305 LOGE("Unsupported flash mode %d", flashMode);
8306 }
8307 } else if (aeMode == CAM_AE_MODE_ON) {
8308 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8309 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8310 } else if (aeMode == CAM_AE_MODE_OFF) {
8311 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8312 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008313 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8314 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8315 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008316 } else {
8317 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8318 "flashMode:%d, aeMode:%u!!!",
8319 redeye, flashMode, aeMode);
8320 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008321 if (mInstantAEC) {
8322        // Increment the frame index count until a bound is reached for instant AEC.
8323 mInstantAecFrameIdxCount++;
8324 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8325 CAM_INTF_META_AEC_INFO, metadata) {
8326 LOGH("ae_params->settled = %d",ae_params->settled);
8327 // If AEC settled, or if number of frames reached bound value,
8328 // should reset instant AEC.
8329 if (ae_params->settled ||
8330 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8331 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8332 mInstantAEC = false;
8333 mResetInstantAEC = true;
8334 mInstantAecFrameIdxCount = 0;
8335 }
8336 }
8337 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008338 resultMetadata = camMetadata.release();
8339 return resultMetadata;
8340}
8341
8342/*===========================================================================
8343 * FUNCTION : dumpMetadataToFile
8344 *
8345 * DESCRIPTION: Dumps tuning metadata to file system
8346 *
8347 * PARAMETERS :
8348 * @meta : tuning metadata
8349 * @dumpFrameCount : current dump frame count
8350 * @enabled : whether dumping is enabled
 * @type : dump type string used in the output file name
 * @frameNumber : frame number of the metadata being dumped
8351 *
8352 *==========================================================================*/
8353void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8354 uint32_t &dumpFrameCount,
8355 bool enabled,
8356 const char *type,
8357 uint32_t frameNumber)
8358{
8359 //Some sanity checks
8360 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8361 LOGE("Tuning sensor data size bigger than expected %d: %d",
8362 meta.tuning_sensor_data_size,
8363 TUNING_SENSOR_DATA_MAX);
8364 return;
8365 }
8366
8367 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8368 LOGE("Tuning VFE data size bigger than expected %d: %d",
8369 meta.tuning_vfe_data_size,
8370 TUNING_VFE_DATA_MAX);
8371 return;
8372 }
8373
8374 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8375 LOGE("Tuning CPP data size bigger than expected %d: %d",
8376 meta.tuning_cpp_data_size,
8377 TUNING_CPP_DATA_MAX);
8378 return;
8379 }
8380
8381 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8382 LOGE("Tuning CAC data size bigger than expected %d: %d",
8383 meta.tuning_cac_data_size,
8384 TUNING_CAC_DATA_MAX);
8385 return;
8386 }
8387 //
8388
8389 if(enabled){
8390 char timeBuf[FILENAME_MAX];
8391 char buf[FILENAME_MAX];
8392 memset(buf, 0, sizeof(buf));
8393 memset(timeBuf, 0, sizeof(timeBuf));
8394 time_t current_time;
8395 struct tm * timeinfo;
8396 time (&current_time);
8397 timeinfo = localtime (&current_time);
8398 if (timeinfo != NULL) {
8399 strftime (timeBuf, sizeof(timeBuf),
8400 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8401 }
8402 String8 filePath(timeBuf);
8403 snprintf(buf,
8404 sizeof(buf),
8405 "%dm_%s_%d.bin",
8406 dumpFrameCount,
8407 type,
8408 frameNumber);
8409 filePath.append(buf);
8410 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8411 if (file_fd >= 0) {
8412 ssize_t written_len = 0;
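            // Dump file layout: tuning_data_version, five uint32 section sizes, then the
            // sensor/VFE/CPP/CAC data sections read from their fixed offsets in meta.data.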
8413 meta.tuning_data_version = TUNING_DATA_VERSION;
8414 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8415 written_len += write(file_fd, data, sizeof(uint32_t));
8416 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8417 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8418 written_len += write(file_fd, data, sizeof(uint32_t));
8419 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8420 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8421 written_len += write(file_fd, data, sizeof(uint32_t));
8422 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8423 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8424 written_len += write(file_fd, data, sizeof(uint32_t));
8425 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8426 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8427 written_len += write(file_fd, data, sizeof(uint32_t));
8428 meta.tuning_mod3_data_size = 0;
8429 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8430 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8431 written_len += write(file_fd, data, sizeof(uint32_t));
8432 size_t total_size = meta.tuning_sensor_data_size;
8433 data = (void *)((uint8_t *)&meta.data);
8434 written_len += write(file_fd, data, total_size);
8435 total_size = meta.tuning_vfe_data_size;
8436 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8437 written_len += write(file_fd, data, total_size);
8438 total_size = meta.tuning_cpp_data_size;
8439 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8440 written_len += write(file_fd, data, total_size);
8441 total_size = meta.tuning_cac_data_size;
8442 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8443 written_len += write(file_fd, data, total_size);
8444 close(file_fd);
8445 }else {
8446 LOGE("fail to open file for metadata dumping");
8447 }
8448 }
8449}
8450
8451/*===========================================================================
8452 * FUNCTION : cleanAndSortStreamInfo
8453 *
8454 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8455 * and sort them such that raw stream is at the end of the list
8456 * This is a workaround for a camera daemon constraint.
8457 *
8458 * PARAMETERS : None
8459 *
8460 *==========================================================================*/
8461void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8462{
8463 List<stream_info_t *> newStreamInfo;
8464
8465 /*clean up invalid streams*/
8466 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8467 it != mStreamInfo.end();) {
8468 if(((*it)->status) == INVALID){
8469 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8470 delete channel;
8471 free(*it);
8472 it = mStreamInfo.erase(it);
8473 } else {
8474 it++;
8475 }
8476 }
8477
8478 // Move preview/video/callback/snapshot streams into newList
8479 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8480 it != mStreamInfo.end();) {
8481 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8482 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8483 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8484 newStreamInfo.push_back(*it);
8485 it = mStreamInfo.erase(it);
8486 } else
8487 it++;
8488 }
8489 // Move raw streams into newList
8490 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8491 it != mStreamInfo.end();) {
8492 newStreamInfo.push_back(*it);
8493 it = mStreamInfo.erase(it);
8494 }
8495
8496 mStreamInfo = newStreamInfo;
8497}
8498
8499/*===========================================================================
8500 * FUNCTION : extractJpegMetadata
8501 *
8502 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8503 * JPEG metadata is cached in HAL, and return as part of capture
8504 * result when metadata is returned from camera daemon.
8505 *
8506 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8507 * @request: capture request
8508 *
8509 *==========================================================================*/
8510void QCamera3HardwareInterface::extractJpegMetadata(
8511 CameraMetadata& jpegMetadata,
8512 const camera3_capture_request_t *request)
8513{
8514 CameraMetadata frame_settings;
8515 frame_settings = request->settings;
8516
8517 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8518 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8519 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8520 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8521
8522 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8523 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8524 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8525 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8526
8527 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8528 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8529 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8530 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8531
8532 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8533 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8534 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8535 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8536
8537 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8538 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8539 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8540 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8541
8542 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8543 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8544 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8545 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8546
8547 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8548 int32_t thumbnail_size[2];
8549 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8550 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8551 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8552 int32_t orientation =
8553 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008554 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008555 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8556 int32_t temp;
8557 temp = thumbnail_size[0];
8558 thumbnail_size[0] = thumbnail_size[1];
8559 thumbnail_size[1] = temp;
8560 }
8561 }
8562 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8563 thumbnail_size,
8564 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8565 }
8566
8567}
8568
8569/*===========================================================================
8570 * FUNCTION : convertToRegions
8571 *
8572 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8573 *
8574 * PARAMETERS :
8575 * @rect : cam_rect_t struct to convert
8576 * @region : int32_t destination array
8577 * @weight : if we are converting from cam_area_t, weight is valid
8578 * else weight = -1
8579 *
8580 *==========================================================================*/
8581void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8582 int32_t *region, int weight)
8583{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008584 region[FACE_LEFT] = rect.left;
8585 region[FACE_TOP] = rect.top;
8586 region[FACE_RIGHT] = rect.left + rect.width;
8587 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008588 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008589 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008590 }
8591}
8592
8593/*===========================================================================
8594 * FUNCTION : convertFromRegions
8595 *
8596 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
8597 *
8598 * PARAMETERS :
8599 * @roi : cam_area_t destination to populate
8600 * @frame_settings : frame settings containing the region tag
8601 * @tag : metadata tag whose data is [x_min, y_min, x_max, y_max, weight]
8603 *
8604 *==========================================================================*/
8605void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008606 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008607{
Thierry Strudel3d639192016-09-09 11:52:26 -07008608 int32_t x_min = frame_settings.find(tag).data.i32[0];
8609 int32_t y_min = frame_settings.find(tag).data.i32[1];
8610 int32_t x_max = frame_settings.find(tag).data.i32[2];
8611 int32_t y_max = frame_settings.find(tag).data.i32[3];
8612 roi.weight = frame_settings.find(tag).data.i32[4];
8613 roi.rect.left = x_min;
8614 roi.rect.top = y_min;
8615 roi.rect.width = x_max - x_min;
8616 roi.rect.height = y_max - y_min;
8617}
8618
8619/*===========================================================================
8620 * FUNCTION : resetIfNeededROI
8621 *
8622 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8623 * crop region
8624 *
8625 * PARAMETERS :
8626 * @roi : cam_area_t struct to resize
8627 * @scalerCropRegion : cam_crop_region_t region to compare against
8628 *
8629 *
8630 *==========================================================================*/
8631bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8632 const cam_crop_region_t* scalerCropRegion)
8633{
8634 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8635 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8636 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8637 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8638
8639 /* According to spec weight = 0 is used to indicate roi needs to be disabled
8640    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8641     * Without this check, the validation below (whether the roi lies inside the
8642     * scaler crop region) would not reset the roi, causing the algorithm to
8643     * continue using a stale roi window.
8644 if (roi->weight == 0) {
8645 return true;
8646 }
8647
8648 if ((roi_x_max < scalerCropRegion->left) ||
8649 // right edge of roi window is left of scalar crop's left edge
8650 (roi_y_max < scalerCropRegion->top) ||
8651 // bottom edge of roi window is above scalar crop's top edge
8652 (roi->rect.left > crop_x_max) ||
8653 // left edge of roi window is beyond(right) of scalar crop's right edge
8654 (roi->rect.top > crop_y_max)){
8655            // top edge of roi window is below scalar crop's bottom edge
8656 return false;
8657 }
8658 if (roi->rect.left < scalerCropRegion->left) {
8659 roi->rect.left = scalerCropRegion->left;
8660 }
8661 if (roi->rect.top < scalerCropRegion->top) {
8662 roi->rect.top = scalerCropRegion->top;
8663 }
8664 if (roi_x_max > crop_x_max) {
8665 roi_x_max = crop_x_max;
8666 }
8667 if (roi_y_max > crop_y_max) {
8668 roi_y_max = crop_y_max;
8669 }
8670 roi->rect.width = roi_x_max - roi->rect.left;
8671 roi->rect.height = roi_y_max - roi->rect.top;
8672 return true;
8673}
8674
8675/*===========================================================================
8676 * FUNCTION : convertLandmarks
8677 *
8678 * DESCRIPTION: helper method to extract the landmarks from face detection info
8679 *
8680 * PARAMETERS :
8681 * @landmark_data : input landmark data to be converted
8682 * @landmarks : int32_t destination array
8683 *
8684 *
8685 *==========================================================================*/
8686void QCamera3HardwareInterface::convertLandmarks(
8687 cam_face_landmarks_info_t landmark_data,
8688 int32_t *landmarks)
8689{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008690 if (landmark_data.is_left_eye_valid) {
8691 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8692 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8693 } else {
8694 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8695 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8696 }
8697
8698 if (landmark_data.is_right_eye_valid) {
8699 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8700 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8701 } else {
8702 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8703 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8704 }
8705
8706 if (landmark_data.is_mouth_valid) {
8707 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8708 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8709 } else {
8710 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8711 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8712 }
8713}
8714
8715/*===========================================================================
8716 * FUNCTION : setInvalidLandmarks
8717 *
8718 * DESCRIPTION: helper method to set invalid landmarks
8719 *
8720 * PARAMETERS :
8721 * @landmarks : int32_t destination array
8722 *
8723 *
8724 *==========================================================================*/
8725void QCamera3HardwareInterface::setInvalidLandmarks(
8726 int32_t *landmarks)
8727{
8728 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8729 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8730 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8731 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8732 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8733 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008734}
8735
8736#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008737
8738/*===========================================================================
8739 * FUNCTION : getCapabilities
8740 *
8741 * DESCRIPTION: query camera capability from back-end
8742 *
8743 * PARAMETERS :
8744 * @ops : mm-interface ops structure
8745 * @cam_handle : camera handle for which we need capability
8746 *
8747 * RETURN : ptr type of capability structure
8748 * capability for success
8749 * NULL for failure
8750 *==========================================================================*/
8751cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8752 uint32_t cam_handle)
8753{
8754 int rc = NO_ERROR;
8755 QCamera3HeapMemory *capabilityHeap = NULL;
8756 cam_capability_t *cap_ptr = NULL;
8757
8758 if (ops == NULL) {
8759 LOGE("Invalid arguments");
8760 return NULL;
8761 }
8762
8763 capabilityHeap = new QCamera3HeapMemory(1);
8764 if (capabilityHeap == NULL) {
8765 LOGE("creation of capabilityHeap failed");
8766 return NULL;
8767 }
8768
8769 /* Allocate memory for capability buffer */
8770 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8771 if(rc != OK) {
8772        LOGE("No memory for capability");
8773 goto allocate_failed;
8774 }
8775
8776 /* Map memory for capability buffer */
8777 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8778
8779 rc = ops->map_buf(cam_handle,
8780 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8781 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8782 if(rc < 0) {
8783 LOGE("failed to map capability buffer");
8784 rc = FAILED_TRANSACTION;
8785 goto map_failed;
8786 }
8787
8788 /* Query Capability */
8789 rc = ops->query_capability(cam_handle);
8790 if(rc < 0) {
8791 LOGE("failed to query capability");
8792 rc = FAILED_TRANSACTION;
8793 goto query_failed;
8794 }
8795
8796 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8797 if (cap_ptr == NULL) {
8798 LOGE("out of memory");
8799 rc = NO_MEMORY;
8800 goto query_failed;
8801 }
8802
8803 memset(cap_ptr, 0, sizeof(cam_capability_t));
8804 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8805
8806 int index;
8807 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8808 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8809 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8810 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8811 }
8812
8813query_failed:
8814 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8815map_failed:
8816 capabilityHeap->deallocate();
8817allocate_failed:
8818 delete capabilityHeap;
8819
8820 if (rc != NO_ERROR) {
8821 return NULL;
8822 } else {
8823 return cap_ptr;
8824 }
8825}
8826
Thierry Strudel3d639192016-09-09 11:52:26 -07008827/*===========================================================================
8828 * FUNCTION : initCapabilities
8829 *
8830 * DESCRIPTION: initialize camera capabilities in static data struct
8831 *
8832 * PARAMETERS :
8833 * @cameraId : camera Id
8834 *
8835 * RETURN : int32_t type of status
8836 * NO_ERROR -- success
8837 * non-zero failure code
8838 *==========================================================================*/
8839int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8840{
8841 int rc = 0;
8842 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008843 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008844
8845 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8846 if (rc) {
8847 LOGE("camera_open failed. rc = %d", rc);
8848 goto open_failed;
8849 }
8850 if (!cameraHandle) {
8851 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8852 goto open_failed;
8853 }
8854
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008855 handle = get_main_camera_handle(cameraHandle->camera_handle);
8856 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8857 if (gCamCapability[cameraId] == NULL) {
8858 rc = FAILED_TRANSACTION;
8859 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008860 }
8861
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008862 gCamCapability[cameraId]->camera_index = cameraId;
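    // For dual-camera sensors, additionally query the aux camera capability and keep a
    // copy of the main camera capability in main_cam_cap.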
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008863 if (is_dual_camera_by_idx(cameraId)) {
8864 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8865 gCamCapability[cameraId]->aux_cam_cap =
8866 getCapabilities(cameraHandle->ops, handle);
8867 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8868 rc = FAILED_TRANSACTION;
8869 free(gCamCapability[cameraId]);
8870 goto failed_op;
8871 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008872
8873 // Copy the main camera capability to main_cam_cap struct
8874 gCamCapability[cameraId]->main_cam_cap =
8875 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8876 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8877 LOGE("out of memory");
8878 rc = NO_MEMORY;
8879 goto failed_op;
8880 }
8881 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8882 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008883 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008884failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008885 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8886 cameraHandle = NULL;
8887open_failed:
8888 return rc;
8889}
8890
8891/*==========================================================================
8892 * FUNCTION : get3AVersion
8893 *
8894 * DESCRIPTION: get the Q3A S/W version
8895 *
8896 * PARAMETERS :
8897 * @sw_version: Reference of Q3A structure which will hold version info upon
8898 * return
8899 *
8900 * RETURN : None
8901 *
8902 *==========================================================================*/
8903void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8904{
8905 if(gCamCapability[mCameraId])
8906 sw_version = gCamCapability[mCameraId]->q3a_version;
8907 else
8908 LOGE("Capability structure NULL!");
8909}
8910
8911
8912/*===========================================================================
8913 * FUNCTION : initParameters
8914 *
8915 * DESCRIPTION: initialize camera parameters
8916 *
8917 * PARAMETERS :
8918 *
8919 * RETURN : int32_t type of status
8920 * NO_ERROR -- success
8921 * non-zero failure code
8922 *==========================================================================*/
8923int QCamera3HardwareInterface::initParameters()
8924{
8925 int rc = 0;
8926
8927 //Allocate Set Param Buffer
8928 mParamHeap = new QCamera3HeapMemory(1);
8929 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8930 if(rc != OK) {
8931 rc = NO_MEMORY;
8932 LOGE("Failed to allocate SETPARM Heap memory");
8933 delete mParamHeap;
8934 mParamHeap = NULL;
8935 return rc;
8936 }
8937
8938 //Map memory for parameters buffer
8939 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8940 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8941 mParamHeap->getFd(0),
8942 sizeof(metadata_buffer_t),
8943 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8944 if(rc < 0) {
8945 LOGE("failed to map SETPARM buffer");
8946 rc = FAILED_TRANSACTION;
8947 mParamHeap->deallocate();
8948 delete mParamHeap;
8949 mParamHeap = NULL;
8950 return rc;
8951 }
8952
8953 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8954
8955 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8956 return rc;
8957}
8958
8959/*===========================================================================
8960 * FUNCTION : deinitParameters
8961 *
8962 * DESCRIPTION: de-initialize camera parameters
8963 *
8964 * PARAMETERS :
8965 *
8966 * RETURN : NONE
8967 *==========================================================================*/
8968void QCamera3HardwareInterface::deinitParameters()
8969{
8970 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8971 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8972
8973 mParamHeap->deallocate();
8974 delete mParamHeap;
8975 mParamHeap = NULL;
8976
8977 mParameters = NULL;
8978
8979 free(mPrevParameters);
8980 mPrevParameters = NULL;
8981}
8982
8983/*===========================================================================
8984 * FUNCTION : calcMaxJpegSize
8985 *
8986 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8987 *
8988 * PARAMETERS :
8989 * @camera_id : camera Id
 *
8990 * RETURN : max_jpeg_size
8991 *==========================================================================*/
8992size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8993{
8994 size_t max_jpeg_size = 0;
8995 size_t temp_width, temp_height;
8996 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8997 MAX_SIZES_CNT);
8998 for (size_t i = 0; i < count; i++) {
8999 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9000 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9001 if (temp_width * temp_height > max_jpeg_size ) {
9002 max_jpeg_size = temp_width * temp_height;
9003 }
9004 }
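    // Worst-case JPEG buffer: 1.5 bytes per pixel of the largest picture size plus the
    // trailing camera3_jpeg_blob_t header. For example (hypothetical 12 MP sensor):
    // 4000 * 3000 * 3 / 2 = 18,000,000 bytes + sizeof(camera3_jpeg_blob_t).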
9005 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9006 return max_jpeg_size;
9007}
9008
9009/*===========================================================================
9010 * FUNCTION : getMaxRawSize
9011 *
9012 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9013 *
9014 * PARAMETERS :
9015 *
9016 * @camera_id : camera Id
 *
9017 *==========================================================================*/
9018cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9019{
9020 int max_width = 0;
9021 cam_dimension_t maxRawSize;
9022
9023 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9024 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9025 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9026 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9027 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9028 }
9029 }
9030 return maxRawSize;
9031}
9032
9033
9034/*===========================================================================
9035 * FUNCTION : calcMaxJpegDim
9036 *
9037 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9038 *
9039 * PARAMETERS :
9040 *
9041 * RETURN : max_jpeg_dim
9042 *==========================================================================*/
9043cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9044{
9045 cam_dimension_t max_jpeg_dim;
9046 cam_dimension_t curr_jpeg_dim;
9047 max_jpeg_dim.width = 0;
9048 max_jpeg_dim.height = 0;
9049 curr_jpeg_dim.width = 0;
9050 curr_jpeg_dim.height = 0;
9051 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9052 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9053 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9054 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9055 max_jpeg_dim.width * max_jpeg_dim.height ) {
9056 max_jpeg_dim.width = curr_jpeg_dim.width;
9057 max_jpeg_dim.height = curr_jpeg_dim.height;
9058 }
9059 }
9060 return max_jpeg_dim;
9061}
9062
9063/*===========================================================================
9064 * FUNCTION : addStreamConfig
9065 *
9066 * DESCRIPTION: adds the stream configuration to the array
9067 *
9068 * PARAMETERS :
9069 * @available_stream_configs : pointer to stream configuration array
9070 * @scalar_format : scalar format
9071 * @dim : configuration dimension
9072 * @config_type : input or output configuration type
9073 *
9074 * RETURN : NONE
9075 *==========================================================================*/
9076void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9077 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9078{
9079 available_stream_configs.add(scalar_format);
9080 available_stream_configs.add(dim.width);
9081 available_stream_configs.add(dim.height);
9082 available_stream_configs.add(config_type);
9083}
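/*
 * Illustrative sketch (comment only): each call to addStreamConfig() appends one
 * flattened (format, width, height, direction) quadruple to the vector that later
 * backs ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS. The dimension below is an
 * assumed example value.
 *
 *   Vector<int32_t> configs;
 *   cam_dimension_t dim = {1920, 1080};                // hypothetical stream size
 *   addStreamConfig(configs, HAL_PIXEL_FORMAT_YCbCr_420_888, dim,
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *   // configs now holds: {YCbCr_420_888, 1920, 1080, OUTPUT}
 */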
9084
9085/*===========================================================================
9086 * FUNCTION : supportBurstCapture
9087 *
9088 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9089 *
9090 * PARAMETERS :
9091 * @cameraId : camera Id
9092 *
9093 * RETURN : true if camera supports BURST_CAPTURE
9094 * false otherwise
9095 *==========================================================================*/
9096bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9097{
9098 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9099 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9100 const int32_t highResWidth = 3264;
9101 const int32_t highResHeight = 2448;
9102
9103 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9104 // Maximum resolution images cannot be captured at >= 10fps
9105 // -> not supporting BURST_CAPTURE
9106 return false;
9107 }
9108
9109 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9110 // Maximum resolution images can be captured at >= 20fps
9111 // --> supporting BURST_CAPTURE
9112 return true;
9113 }
9114
9115 // Find the smallest highRes resolution, or largest resolution if there is none
9116 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9117 MAX_SIZES_CNT);
9118 size_t highRes = 0;
9119 while ((highRes + 1 < totalCnt) &&
9120 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9121 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9122 highResWidth * highResHeight)) {
9123 highRes++;
9124 }
9125 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9126 return true;
9127 } else {
9128 return false;
9129 }
9130}
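/*
 * Illustrative sketch (comment only): how the duration bounds above map to the
 * BURST_CAPTURE decision. The durations are assumed example values.
 *
 *   // picture_min_duration[0] = 33333333 ns (~30 fps at max resolution)
 *   //   -> <= highResDurationBound (50 ms), so BURST_CAPTURE is advertised.
 *   // picture_min_duration[0] = 125000000 ns (8 fps at max resolution)
 *   //   -> > fullResDurationBound (100 ms), so BURST_CAPTURE is not advertised.
 *   // Anything in between falls back to checking the smallest ~8 MP
 *   // (3264x2448 or larger) size against the 50 ms bound.
 */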
9131
9132/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009133 * FUNCTION : getPDStatIndex
9134 *
9135 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9136 *
9137 * PARAMETERS :
9138 * @caps : camera capabilities
9139 *
9140 * RETURN : int32_t type
9141 * non-negative - on success
9142 * -1 - on failure
9143 *==========================================================================*/
9144int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9145 if (nullptr == caps) {
9146 return -1;
9147 }
9148
9149 uint32_t metaRawCount = caps->meta_raw_channel_count;
9150 int32_t ret = -1;
9151 for (size_t i = 0; i < metaRawCount; i++) {
9152 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9153 ret = i;
9154 break;
9155 }
9156 }
9157
9158 return ret;
9159}
9160
9161/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009162 * FUNCTION : initStaticMetadata
9163 *
9164 * DESCRIPTION: initialize the static metadata
9165 *
9166 * PARAMETERS :
9167 * @cameraId : camera Id
9168 *
9169 * RETURN : int32_t type of status
9170 * 0 -- success
9171 * non-zero failure code
9172 *==========================================================================*/
9173int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9174{
9175 int rc = 0;
9176 CameraMetadata staticInfo;
9177 size_t count = 0;
9178 bool limitedDevice = false;
9179 char prop[PROPERTY_VALUE_MAX];
9180 bool supportBurst = false;
9181
9182 supportBurst = supportBurstCapture(cameraId);
9183
9184 /* If the sensor is a YUV or mono sensor (no raw support), if per-frame control is
9185 * not guaranteed, or if the min fps at max resolution is less than 20 fps, the
9186 * device is advertised as a LIMITED device */
9187 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9188 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9189 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9190 !supportBurst;
9191
9192 uint8_t supportedHwLvl = limitedDevice ?
9193 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009194#ifndef USE_HAL_3_3
9195 // LEVEL_3 - This device will support level 3.
9196 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9197#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009198 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009199#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009200
9201 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9202 &supportedHwLvl, 1);
9203
9204 bool facingBack = false;
9205 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9206 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9207 facingBack = true;
9208 }
9209 /*HAL 3 only*/
9210 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9211 &gCamCapability[cameraId]->min_focus_distance, 1);
9212
9213 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9214 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9215
9216 /*should use the list of focal lengths, but the sensor doesn't provide that info yet*/
9217 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9218 &gCamCapability[cameraId]->focal_length,
9219 1);
9220
9221 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9222 gCamCapability[cameraId]->apertures,
9223 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9224
9225 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9226 gCamCapability[cameraId]->filter_densities,
9227 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9228
9229
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009230 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9231 size_t mode_count =
9232 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9233 for (size_t i = 0; i < mode_count; i++) {
9234 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9235 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009236 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009237 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009238
9239 int32_t lens_shading_map_size[] = {
9240 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9241 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9242 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9243 lens_shading_map_size,
9244 sizeof(lens_shading_map_size)/sizeof(int32_t));
9245
9246 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9247 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9248
9249 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9250 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9251
9252 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9253 &gCamCapability[cameraId]->max_frame_duration, 1);
9254
9255 camera_metadata_rational baseGainFactor = {
9256 gCamCapability[cameraId]->base_gain_factor.numerator,
9257 gCamCapability[cameraId]->base_gain_factor.denominator};
9258 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9259 &baseGainFactor, 1);
9260
9261 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9262 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9263
9264 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9265 gCamCapability[cameraId]->pixel_array_size.height};
9266 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9267 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9268
9269 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9270 gCamCapability[cameraId]->active_array_size.top,
9271 gCamCapability[cameraId]->active_array_size.width,
9272 gCamCapability[cameraId]->active_array_size.height};
9273 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9274 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9275
9276 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9277 &gCamCapability[cameraId]->white_level, 1);
9278
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009279 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9280 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9281 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009282 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009283 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009284
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009285#ifndef USE_HAL_3_3
9286 bool hasBlackRegions = false;
9287 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9288 LOGW("black_region_count: %d is bounded to %d",
9289 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9290 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9291 }
9292 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9293 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9294 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9295 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9296 }
9297 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9298 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9299 hasBlackRegions = true;
9300 }
9301#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009302 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9303 &gCamCapability[cameraId]->flash_charge_duration, 1);
9304
9305 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9306 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9307
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009308 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9309 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9310 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009311 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9312 &timestampSource, 1);
9313
Thierry Strudel54dc9782017-02-15 12:12:10 -08009314 //update histogram vendor data
9315 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009316 &gCamCapability[cameraId]->histogram_size, 1);
9317
Thierry Strudel54dc9782017-02-15 12:12:10 -08009318 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009319 &gCamCapability[cameraId]->max_histogram_count, 1);
9320
Shuzhen Wang14415f52016-11-16 18:26:18 -08009321 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9322 //so that the app can request fewer bins than the maximum supported.
9323 std::vector<int32_t> histBins;
9324 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9325 histBins.push_back(maxHistBins);
9326 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9327 (maxHistBins & 0x1) == 0) {
9328 histBins.push_back(maxHistBins >> 1);
9329 maxHistBins >>= 1;
9330 }
9331 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9332 histBins.data(), histBins.size());
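    /*
     * Illustrative sketch (comment only): the halving loop above, assuming a
     * hypothetical max_histogram_count of 256 and MIN_CAM_HISTOGRAM_STATS_SIZE of 64
     * (example values only), advertises histBins = {256, 128, 64}.
     *
     *   // push 256; 128 >= 64 and 256 is even -> push 128;
     *   // 64 >= 64 and 128 is even -> push 64; (64 >> 1) = 32 < 64 -> stop.
     */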
9333
Thierry Strudel3d639192016-09-09 11:52:26 -07009334 int32_t sharpness_map_size[] = {
9335 gCamCapability[cameraId]->sharpness_map_size.width,
9336 gCamCapability[cameraId]->sharpness_map_size.height};
9337
9338 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9339 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9340
9341 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9342 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9343
Emilian Peev0f3c3162017-03-15 12:57:46 +00009344 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9345 if (0 <= indexPD) {
9346 // Advertise PD stats data as part of the Depth capabilities
9347 int32_t depthWidth =
9348 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9349 int32_t depthHeight =
9350 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009351 int32_t depthStride =
9352 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009353 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9354 assert(0 < depthSamplesCount);
9355 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9356 &depthSamplesCount, 1);
9357
9358 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9359 depthHeight,
9360 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9361 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9362 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9363 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9364 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9365
9366 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9367 depthHeight, 33333333,
9368 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9369 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9370 depthMinDuration,
9371 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9372
9373 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9374 depthHeight, 0,
9375 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9376 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9377 depthStallDuration,
9378 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9379
9380 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9381 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009382
9383 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9384 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9385 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009386 }
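    /*
     * Illustrative sketch (comment only): sizing of the PD depth configurations
     * above for an assumed 640x480 PD stats block (example values only).
     *
     *   int32_t depthWidth  = 640, depthHeight = 480;
     *   int32_t depthStride = depthWidth * 2;                            // = 1280
     *   int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16; // = 38400
     *   // Advertised as a RAW16 640x480 OUTPUT stream plus a BLOB stream of
     *   // depthSamplesCount x 1, both with a 33333333 ns min frame duration.
     */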
9387
Thierry Strudel3d639192016-09-09 11:52:26 -07009388 int32_t scalar_formats[] = {
9389 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9390 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9391 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9392 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9393 HAL_PIXEL_FORMAT_RAW10,
9394 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009395 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9396 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9397 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009398
9399 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9400 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9401 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9402 count, MAX_SIZES_CNT, available_processed_sizes);
9403 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9404 available_processed_sizes, count * 2);
9405
9406 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9407 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9408 makeTable(gCamCapability[cameraId]->raw_dim,
9409 count, MAX_SIZES_CNT, available_raw_sizes);
9410 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9411 available_raw_sizes, count * 2);
9412
9413 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9414 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9415 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9416 count, MAX_SIZES_CNT, available_fps_ranges);
9417 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9418 available_fps_ranges, count * 2);
9419
9420 camera_metadata_rational exposureCompensationStep = {
9421 gCamCapability[cameraId]->exp_compensation_step.numerator,
9422 gCamCapability[cameraId]->exp_compensation_step.denominator};
9423 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9424 &exposureCompensationStep, 1);
9425
9426 Vector<uint8_t> availableVstabModes;
9427 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9428 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009429 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009430 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009431 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009432 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009433 count = IS_TYPE_MAX;
9434 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9435 for (size_t i = 0; i < count; i++) {
9436 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9437 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9438 eisSupported = true;
9439 break;
9440 }
9441 }
9442 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009443 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9444 }
9445 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9446 availableVstabModes.array(), availableVstabModes.size());
9447
9448 /*HAL 1 and HAL 3 common*/
9449 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9450 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9451 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009452 // Cap the max zoom to the max preferred value
9453 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009454 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9455 &maxZoom, 1);
9456
9457 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9458 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9459
9460 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9461 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9462 max3aRegions[2] = 0; /* AF not supported */
9463 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9464 max3aRegions, 3);
9465
9466 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9467 memset(prop, 0, sizeof(prop));
9468 property_get("persist.camera.facedetect", prop, "1");
9469 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9470 LOGD("Support face detection mode: %d",
9471 supportedFaceDetectMode);
9472
9473 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009474 /* supported face detect mode should be OFF if the max number of faces is 0 */
9475 if (maxFaces <= 0) {
9476 supportedFaceDetectMode = 0;
9477 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009478 Vector<uint8_t> availableFaceDetectModes;
9479 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9480 if (supportedFaceDetectMode == 1) {
9481 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9482 } else if (supportedFaceDetectMode == 2) {
9483 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9484 } else if (supportedFaceDetectMode == 3) {
9485 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9486 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9487 } else {
9488 maxFaces = 0;
9489 }
9490 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9491 availableFaceDetectModes.array(),
9492 availableFaceDetectModes.size());
9493 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9494 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009495 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9496 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9497 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009498
9499 int32_t exposureCompensationRange[] = {
9500 gCamCapability[cameraId]->exposure_compensation_min,
9501 gCamCapability[cameraId]->exposure_compensation_max};
9502 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9503 exposureCompensationRange,
9504 sizeof(exposureCompensationRange)/sizeof(int32_t));
9505
9506 uint8_t lensFacing = (facingBack) ?
9507 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9508 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9509
9510 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9511 available_thumbnail_sizes,
9512 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9513
9514 /*all sizes will be combined into this tag*/
9515 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9516 /*android.scaler.availableStreamConfigurations*/
9517 Vector<int32_t> available_stream_configs;
9518 cam_dimension_t active_array_dim;
9519 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9520 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009521
9522 /*Advertise the list of supported input dimensions based on the property below.
9523 By default all sizes up to 5MP will be advertised.
9524 Note that the setprop resolution format should be WxH,
9525 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9526 To list all supported sizes, set the property to "0x0" */
9527 cam_dimension_t minInputSize = {2592,1944}; //5MP
9528 memset(prop, 0, sizeof(prop));
9529 property_get("persist.camera.input.minsize", prop, "2592x1944");
9530 if (strlen(prop) > 0) {
9531 char *saveptr = NULL;
9532 char *token = strtok_r(prop, "x", &saveptr);
9533 if (token != NULL) {
9534 minInputSize.width = atoi(token);
9535 }
9536 token = strtok_r(NULL, "x", &saveptr);
9537 if (token != NULL) {
9538 minInputSize.height = atoi(token);
9539 }
9540 }
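    /*
     * Illustrative sketch (comment only): parsing behaviour of the property above.
     *
     *   // "1280x720" -> strtok_r on 'x' yields "1280" then "720",
     *   //               so minInputSize = {1280, 720}.
     *   // "0x0"      -> minInputSize = {0, 0}, which no picture size is smaller
     *   //               than, so the minimum-size check below never filters
     *   //               a candidate input size out.
     */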
9541
Thierry Strudel3d639192016-09-09 11:52:26 -07009542 /* Add input/output stream configurations for each scalar formats*/
9543 for (size_t j = 0; j < scalar_formats_count; j++) {
9544 switch (scalar_formats[j]) {
9545 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9546 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9547 case HAL_PIXEL_FORMAT_RAW10:
9548 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9549 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9550 addStreamConfig(available_stream_configs, scalar_formats[j],
9551 gCamCapability[cameraId]->raw_dim[i],
9552 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9553 }
9554 break;
9555 case HAL_PIXEL_FORMAT_BLOB:
9556 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9557 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9558 addStreamConfig(available_stream_configs, scalar_formats[j],
9559 gCamCapability[cameraId]->picture_sizes_tbl[i],
9560 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9561 }
9562 break;
9563 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9564 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9565 default:
9566 cam_dimension_t largest_picture_size;
9567 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9568 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9569 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9570 addStreamConfig(available_stream_configs, scalar_formats[j],
9571 gCamCapability[cameraId]->picture_sizes_tbl[i],
9572 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009573 /*For the below 2 formats we also support input streams for reprocessing; advertise those*/
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009574 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9575 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009576 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9577 >= minInputSize.width) || (gCamCapability[cameraId]->
9578 picture_sizes_tbl[i].height >= minInputSize.height)) {
9579 addStreamConfig(available_stream_configs, scalar_formats[j],
9580 gCamCapability[cameraId]->picture_sizes_tbl[i],
9581 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9582 }
9583 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009584 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009585
Thierry Strudel3d639192016-09-09 11:52:26 -07009586 break;
9587 }
9588 }
9589
9590 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9591 available_stream_configs.array(), available_stream_configs.size());
9592 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9593 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9594
9595 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9596 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9597
9598 /* android.scaler.availableMinFrameDurations */
9599 Vector<int64_t> available_min_durations;
9600 for (size_t j = 0; j < scalar_formats_count; j++) {
9601 switch (scalar_formats[j]) {
9602 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9603 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9604 case HAL_PIXEL_FORMAT_RAW10:
9605 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9606 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9607 available_min_durations.add(scalar_formats[j]);
9608 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9609 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9610 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9611 }
9612 break;
9613 default:
9614 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9615 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9616 available_min_durations.add(scalar_formats[j]);
9617 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9618 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9619 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9620 }
9621 break;
9622 }
9623 }
9624 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9625 available_min_durations.array(), available_min_durations.size());
9626
9627 Vector<int32_t> available_hfr_configs;
9628 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9629 int32_t fps = 0;
9630 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9631 case CAM_HFR_MODE_60FPS:
9632 fps = 60;
9633 break;
9634 case CAM_HFR_MODE_90FPS:
9635 fps = 90;
9636 break;
9637 case CAM_HFR_MODE_120FPS:
9638 fps = 120;
9639 break;
9640 case CAM_HFR_MODE_150FPS:
9641 fps = 150;
9642 break;
9643 case CAM_HFR_MODE_180FPS:
9644 fps = 180;
9645 break;
9646 case CAM_HFR_MODE_210FPS:
9647 fps = 210;
9648 break;
9649 case CAM_HFR_MODE_240FPS:
9650 fps = 240;
9651 break;
9652 case CAM_HFR_MODE_480FPS:
9653 fps = 480;
9654 break;
9655 case CAM_HFR_MODE_OFF:
9656 case CAM_HFR_MODE_MAX:
9657 default:
9658 break;
9659 }
9660
9661 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9662 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9663 /* For each HFR frame rate, need to advertise one variable fps range
9664 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9665 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9666 * set by the app. When video recording is started, [120, 120] is
9667 * set. This way sensor configuration does not change when recording
9668 * is started */
9669
9670 /* (width, height, fps_min, fps_max, batch_size_max) */
9671 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9672 j < MAX_SIZES_CNT; j++) {
9673 available_hfr_configs.add(
9674 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9675 available_hfr_configs.add(
9676 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9677 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9678 available_hfr_configs.add(fps);
9679 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9680
9681 /* (width, height, fps_min, fps_max, batch_size_max) */
9682 available_hfr_configs.add(
9683 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9684 available_hfr_configs.add(
9685 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9686 available_hfr_configs.add(fps);
9687 available_hfr_configs.add(fps);
9688 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9689 }
9690 }
9691 }
9692 //Advertise HFR capability only if the property is set
9693 memset(prop, 0, sizeof(prop));
9694 property_get("persist.camera.hal3hfr.enable", prop, "1");
9695 uint8_t hfrEnable = (uint8_t)atoi(prop);
9696
9697 if(hfrEnable && available_hfr_configs.array()) {
9698 staticInfo.update(
9699 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9700 available_hfr_configs.array(), available_hfr_configs.size());
9701 }
9702
9703 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9704 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9705 &max_jpeg_size, 1);
9706
9707 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9708 size_t size = 0;
9709 count = CAM_EFFECT_MODE_MAX;
9710 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9711 for (size_t i = 0; i < count; i++) {
9712 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9713 gCamCapability[cameraId]->supported_effects[i]);
9714 if (NAME_NOT_FOUND != val) {
9715 avail_effects[size] = (uint8_t)val;
9716 size++;
9717 }
9718 }
9719 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9720 avail_effects,
9721 size);
9722
9723 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9724 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9725 size_t supported_scene_modes_cnt = 0;
9726 count = CAM_SCENE_MODE_MAX;
9727 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9728 for (size_t i = 0; i < count; i++) {
9729 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9730 CAM_SCENE_MODE_OFF) {
9731 int val = lookupFwkName(SCENE_MODES_MAP,
9732 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9733 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009734
Thierry Strudel3d639192016-09-09 11:52:26 -07009735 if (NAME_NOT_FOUND != val) {
9736 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9737 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9738 supported_scene_modes_cnt++;
9739 }
9740 }
9741 }
9742 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9743 avail_scene_modes,
9744 supported_scene_modes_cnt);
9745
9746 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9747 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9748 supported_scene_modes_cnt,
9749 CAM_SCENE_MODE_MAX,
9750 scene_mode_overrides,
9751 supported_indexes,
9752 cameraId);
9753
9754 if (supported_scene_modes_cnt == 0) {
9755 supported_scene_modes_cnt = 1;
9756 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9757 }
9758
9759 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9760 scene_mode_overrides, supported_scene_modes_cnt * 3);
9761
9762 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9763 ANDROID_CONTROL_MODE_AUTO,
9764 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9765 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9766 available_control_modes,
9767 3);
9768
9769 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9770 size = 0;
9771 count = CAM_ANTIBANDING_MODE_MAX;
9772 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9773 for (size_t i = 0; i < count; i++) {
9774 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9775 gCamCapability[cameraId]->supported_antibandings[i]);
9776 if (NAME_NOT_FOUND != val) {
9777 avail_antibanding_modes[size] = (uint8_t)val;
9778 size++;
9779 }
9780
9781 }
9782 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9783 avail_antibanding_modes,
9784 size);
9785
9786 uint8_t avail_abberation_modes[] = {
9787 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9788 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9789 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9790 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9791 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9792 if (0 == count) {
9793 // If no aberration correction modes are available for a device, advertise only the OFF mode
9794 size = 1;
9795 } else {
9796 // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
9797 // So, advertise all 3 modes if at least one mode is supported, as per the
9798 // new M requirement
9799 size = 3;
9800 }
9801 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9802 avail_abberation_modes,
9803 size);
9804
9805 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9806 size = 0;
9807 count = CAM_FOCUS_MODE_MAX;
9808 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9809 for (size_t i = 0; i < count; i++) {
9810 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9811 gCamCapability[cameraId]->supported_focus_modes[i]);
9812 if (NAME_NOT_FOUND != val) {
9813 avail_af_modes[size] = (uint8_t)val;
9814 size++;
9815 }
9816 }
9817 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9818 avail_af_modes,
9819 size);
9820
9821 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9822 size = 0;
9823 count = CAM_WB_MODE_MAX;
9824 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9825 for (size_t i = 0; i < count; i++) {
9826 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9827 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9828 gCamCapability[cameraId]->supported_white_balances[i]);
9829 if (NAME_NOT_FOUND != val) {
9830 avail_awb_modes[size] = (uint8_t)val;
9831 size++;
9832 }
9833 }
9834 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9835 avail_awb_modes,
9836 size);
9837
9838 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9839 count = CAM_FLASH_FIRING_LEVEL_MAX;
9840 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9841 count);
9842 for (size_t i = 0; i < count; i++) {
9843 available_flash_levels[i] =
9844 gCamCapability[cameraId]->supported_firing_levels[i];
9845 }
9846 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9847 available_flash_levels, count);
9848
9849 uint8_t flashAvailable;
9850 if (gCamCapability[cameraId]->flash_available)
9851 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9852 else
9853 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9854 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9855 &flashAvailable, 1);
9856
9857 Vector<uint8_t> avail_ae_modes;
9858 count = CAM_AE_MODE_MAX;
9859 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9860 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009861 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9862 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9863 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9864 }
9865 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009866 }
9867 if (flashAvailable) {
9868 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9869 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9870 }
9871 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9872 avail_ae_modes.array(),
9873 avail_ae_modes.size());
9874
9875 int32_t sensitivity_range[2];
9876 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9877 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9878 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9879 sensitivity_range,
9880 sizeof(sensitivity_range) / sizeof(int32_t));
9881
9882 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9883 &gCamCapability[cameraId]->max_analog_sensitivity,
9884 1);
9885
9886 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9887 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9888 &sensor_orientation,
9889 1);
9890
9891 int32_t max_output_streams[] = {
9892 MAX_STALLING_STREAMS,
9893 MAX_PROCESSED_STREAMS,
9894 MAX_RAW_STREAMS};
9895 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9896 max_output_streams,
9897 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9898
9899 uint8_t avail_leds = 0;
9900 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9901 &avail_leds, 0);
9902
9903 uint8_t focus_dist_calibrated;
9904 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9905 gCamCapability[cameraId]->focus_dist_calibrated);
9906 if (NAME_NOT_FOUND != val) {
9907 focus_dist_calibrated = (uint8_t)val;
9908 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9909 &focus_dist_calibrated, 1);
9910 }
9911
9912 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9913 size = 0;
9914 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9915 MAX_TEST_PATTERN_CNT);
9916 for (size_t i = 0; i < count; i++) {
9917 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9918 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9919 if (NAME_NOT_FOUND != testpatternMode) {
9920 avail_testpattern_modes[size] = testpatternMode;
9921 size++;
9922 }
9923 }
9924 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9925 avail_testpattern_modes,
9926 size);
9927
9928 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9929 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9930 &max_pipeline_depth,
9931 1);
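    /*
     * Illustrative sketch (comment only): the pipeline depth reported above is the
     * sum of the in-flight request budget and the fixed delays defined at the top
     * of this file. The in-flight value below is an assumed example, not
     * necessarily the value configured for this target.
     *
     *   // max_pipeline_depth = MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY
     *   // e.g. with an assumed in-flight budget of 6 and delays of 2 and 0,
     *   // the HAL would report a depth of 8.
     */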
9932
9933 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9934 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9935 &partial_result_count,
9936 1);
9937
9938 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9939 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9940
9941 Vector<uint8_t> available_capabilities;
9942 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9943 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9944 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9945 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9946 if (supportBurst) {
9947 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9948 }
9949 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9950 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9951 if (hfrEnable && available_hfr_configs.array()) {
9952 available_capabilities.add(
9953 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9954 }
9955
9956 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9957 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9958 }
9959 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9960 available_capabilities.array(),
9961 available_capabilities.size());
9962
9963 //aeLockAvailable is set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9964 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9965 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9966 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9967
9968 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9969 &aeLockAvailable, 1);
9970
9971 //awbLockAvailable is set to true if capabilities include MANUAL_POST_PROCESSING or
9972 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9973 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9974 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9975
9976 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9977 &awbLockAvailable, 1);
9978
9979 int32_t max_input_streams = 1;
9980 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9981 &max_input_streams,
9982 1);
9983
9984 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9985 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9986 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9987 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9988 HAL_PIXEL_FORMAT_YCbCr_420_888};
9989 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9990 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9991
9992 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9993 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9994 &max_latency,
9995 1);
9996
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009997#ifndef USE_HAL_3_3
9998 int32_t isp_sensitivity_range[2];
9999 isp_sensitivity_range[0] =
10000 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10001 isp_sensitivity_range[1] =
10002 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10003 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10004 isp_sensitivity_range,
10005 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10006#endif
10007
Thierry Strudel3d639192016-09-09 11:52:26 -070010008 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10009 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10010 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10011 available_hot_pixel_modes,
10012 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10013
10014 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10015 ANDROID_SHADING_MODE_FAST,
10016 ANDROID_SHADING_MODE_HIGH_QUALITY};
10017 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10018 available_shading_modes,
10019 3);
10020
10021 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10022 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10023 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10024 available_lens_shading_map_modes,
10025 2);
10026
10027 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10028 ANDROID_EDGE_MODE_FAST,
10029 ANDROID_EDGE_MODE_HIGH_QUALITY,
10030 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10031 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10032 available_edge_modes,
10033 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10034
10035 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10036 ANDROID_NOISE_REDUCTION_MODE_FAST,
10037 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10038 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10039 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10040 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10041 available_noise_red_modes,
10042 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10043
10044 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10045 ANDROID_TONEMAP_MODE_FAST,
10046 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10047 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10048 available_tonemap_modes,
10049 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10050
10051 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10052 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10053 available_hot_pixel_map_modes,
10054 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10055
10056 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10057 gCamCapability[cameraId]->reference_illuminant1);
10058 if (NAME_NOT_FOUND != val) {
10059 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10060 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10061 }
10062
10063 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10064 gCamCapability[cameraId]->reference_illuminant2);
10065 if (NAME_NOT_FOUND != val) {
10066 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10067 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10068 }
10069
10070 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10071 (void *)gCamCapability[cameraId]->forward_matrix1,
10072 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10073
10074 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10075 (void *)gCamCapability[cameraId]->forward_matrix2,
10076 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10077
10078 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10079 (void *)gCamCapability[cameraId]->color_transform1,
10080 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10081
10082 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10083 (void *)gCamCapability[cameraId]->color_transform2,
10084 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10085
10086 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10087 (void *)gCamCapability[cameraId]->calibration_transform1,
10088 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10089
10090 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10091 (void *)gCamCapability[cameraId]->calibration_transform2,
10092 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10093
10094 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10095 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10096 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10097 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10098 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10099 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10100 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10101 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10102 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10103 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10104 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10105 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10106 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10107 ANDROID_JPEG_GPS_COORDINATES,
10108 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10109 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10110 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10111 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10112 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10113 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10114 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10115 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10116 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10117 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010118#ifndef USE_HAL_3_3
10119 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10120#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010121 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010122 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010123 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10124 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010125 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010126 /* DevCamDebug metadata request_keys_basic */
10127 DEVCAMDEBUG_META_ENABLE,
10128 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010129 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010130 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010131 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010132 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010133 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010134 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010135
10136 size_t request_keys_cnt =
10137 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10138 Vector<int32_t> available_request_keys;
10139 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10140 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10141 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10142 }
10143
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010144 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010145 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10146 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10147 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010148 }
10149
Thierry Strudel3d639192016-09-09 11:52:26 -070010150 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10151 available_request_keys.array(), available_request_keys.size());
10152
10153 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10154 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10155 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10156 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10157 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10158 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10159 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10160 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10161 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10162 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10163 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10164 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10165 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10166 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10167 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10168 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10169 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010170 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010171 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10172 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10173 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010174 ANDROID_STATISTICS_FACE_SCORES,
10175#ifndef USE_HAL_3_3
10176 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10177#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010178 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010179 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010180 // DevCamDebug metadata result_keys_basic
10181 DEVCAMDEBUG_META_ENABLE,
10182 // DevCamDebug metadata result_keys AF
10183 DEVCAMDEBUG_AF_LENS_POSITION,
10184 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10185 DEVCAMDEBUG_AF_TOF_DISTANCE,
10186 DEVCAMDEBUG_AF_LUMA,
10187 DEVCAMDEBUG_AF_HAF_STATE,
10188 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10189 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10190 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10191 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10192 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10193 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10194 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10195 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10196 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10197 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10198 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10199 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10200 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10201 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10202 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10203 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10204 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10205 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10206 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10207 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10208 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10209 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10210 // DevCamDebug metadata result_keys AEC
10211 DEVCAMDEBUG_AEC_TARGET_LUMA,
10212 DEVCAMDEBUG_AEC_COMP_LUMA,
10213 DEVCAMDEBUG_AEC_AVG_LUMA,
10214 DEVCAMDEBUG_AEC_CUR_LUMA,
10215 DEVCAMDEBUG_AEC_LINECOUNT,
10216 DEVCAMDEBUG_AEC_REAL_GAIN,
10217 DEVCAMDEBUG_AEC_EXP_INDEX,
10218 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010219 // DevCamDebug metadata result_keys zzHDR
10220 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10221 DEVCAMDEBUG_AEC_L_LINECOUNT,
10222 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10223 DEVCAMDEBUG_AEC_S_LINECOUNT,
10224 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10225 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10226 // DevCamDebug metadata result_keys ADRC
10227 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10228 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10229 DEVCAMDEBUG_AEC_GTM_RATIO,
10230 DEVCAMDEBUG_AEC_LTM_RATIO,
10231 DEVCAMDEBUG_AEC_LA_RATIO,
10232 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010233 // DevCamDebug metadata result_keys AEC MOTION
10234 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10235 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10236 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010237 // DevCamDebug metadata result_keys AWB
10238 DEVCAMDEBUG_AWB_R_GAIN,
10239 DEVCAMDEBUG_AWB_G_GAIN,
10240 DEVCAMDEBUG_AWB_B_GAIN,
10241 DEVCAMDEBUG_AWB_CCT,
10242 DEVCAMDEBUG_AWB_DECISION,
10243 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010244 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10245 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10246 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010247 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010248 };
10249
Thierry Strudel3d639192016-09-09 11:52:26 -070010250 size_t result_keys_cnt =
10251 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10252
10253 Vector<int32_t> available_result_keys;
10254 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10255 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10256 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10257 }
10258 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10259 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10260 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10261 }
10262 if (supportedFaceDetectMode == 1) {
10263 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10264 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10265 } else if ((supportedFaceDetectMode == 2) ||
10266 (supportedFaceDetectMode == 3)) {
10267 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10268 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10269 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010270#ifndef USE_HAL_3_3
10271 if (hasBlackRegions) {
10272 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10273 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10274 }
10275#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010276
10277 if (gExposeEnableZslKey) {
10278 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10279 }
10280
Thierry Strudel3d639192016-09-09 11:52:26 -070010281 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10282 available_result_keys.array(), available_result_keys.size());
10283
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010284 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010285 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10286 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10287 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10288 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10289 ANDROID_SCALER_CROPPING_TYPE,
10290 ANDROID_SYNC_MAX_LATENCY,
10291 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10292 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10293 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10294 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10295 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10296 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10297 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10298 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10299 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10300 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10301 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10302 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10303 ANDROID_LENS_FACING,
10304 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10305 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10306 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10307 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10308 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10309 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10310 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10311 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10312 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10313 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10314 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10315 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10316 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10317 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10318 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10319 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10320 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10321 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10322 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10323 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010324 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010325 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10326 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10327 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10328 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10329 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10330 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10331 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10332 ANDROID_CONTROL_AVAILABLE_MODES,
10333 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10334 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10335 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10336 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010337 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10338#ifndef USE_HAL_3_3
10339 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10340 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10341#endif
10342 };
10343
10344 Vector<int32_t> available_characteristics_keys;
10345 available_characteristics_keys.appendArray(characteristics_keys_basic,
10346 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10347#ifndef USE_HAL_3_3
10348 if (hasBlackRegions) {
10349 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10350 }
10351#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010352
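    // Depth static keys below are only advertised when a PD (phase-detect) sample
    // format was found earlier; indexPD is assumed to hold its index, or a negative
    // value when depth output is unsupported.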
10353 if (0 <= indexPD) {
10354 int32_t depthKeys[] = {
10355 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10356 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10357 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10358 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10359 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10360 };
10361 available_characteristics_keys.appendArray(depthKeys,
10362 sizeof(depthKeys) / sizeof(depthKeys[0]));
10363 }
10364
Thierry Strudel3d639192016-09-09 11:52:26 -070010365 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010366 available_characteristics_keys.array(),
10367 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010368
10369 /*available stall durations depend on the hw + sw and will be different for different devices */
10370 /*have to add for raw after implementation*/
10371 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10372 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10373
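    // Each ANDROID_SCALER_AVAILABLE_STALL_DURATIONS entry built below is a 4-tuple
    // of (format, width, height, stall duration in ns), matching what the loop
    // appends to available_stall_durations.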
10374 Vector<int64_t> available_stall_durations;
10375 for (uint32_t j = 0; j < stall_formats_count; j++) {
10376 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10377 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10378 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10379 available_stall_durations.add(stall_formats[j]);
10380 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10381 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10382 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10383 }
10384 } else {
10385 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10386 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10387 available_stall_durations.add(stall_formats[j]);
10388 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10389 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10390 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10391 }
10392 }
10393 }
10394 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10395 available_stall_durations.array(),
10396 available_stall_durations.size());
10397
10398 //QCAMERA3_OPAQUE_RAW
10399 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10400 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10401 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10402 case LEGACY_RAW:
10403 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10404 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10405 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10406 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10407 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10408 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10409 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10410 break;
10411 case MIPI_RAW:
10412 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10413 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10414 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10415 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10416 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10417 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10418 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10419 break;
10420 default:
10421 LOGE("unknown opaque_raw_format %d",
10422 gCamCapability[cameraId]->opaque_raw_fmt);
10423 break;
10424 }
10425 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10426
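    // QCAMERA3_OPAQUE_RAW_STRIDES is published as (width, height, stride) triples,
    // one per supported RAW dimension, using the stride computed for the opaque RAW
    // format selected above.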
10427 Vector<int32_t> strides;
10428 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10429 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10430 cam_stream_buf_plane_info_t buf_planes;
10431 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10432 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10433 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10434 &gCamCapability[cameraId]->padding_info, &buf_planes);
10435 strides.add(buf_planes.plane_info.mp[0].stride);
10436 }
10437 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10438 strides.size());
10439
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010440 //TBD: remove the following line once backend advertises zzHDR in feature mask
10441 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010442 //Video HDR default
10443 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10444 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010445 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010446 int32_t vhdr_mode[] = {
10447 QCAMERA3_VIDEO_HDR_MODE_OFF,
10448 QCAMERA3_VIDEO_HDR_MODE_ON};
10449
10450 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10451 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10452 vhdr_mode, vhdr_mode_count);
10453 }
10454
Thierry Strudel3d639192016-09-09 11:52:26 -070010455 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10456 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10457 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10458
10459 uint8_t isMonoOnly =
10460 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10461 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10462 &isMonoOnly, 1);
10463
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010464#ifndef USE_HAL_3_3
10465 Vector<int32_t> opaque_size;
10466 for (size_t j = 0; j < scalar_formats_count; j++) {
10467 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10468 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10469 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10470 cam_stream_buf_plane_info_t buf_planes;
10471
10472 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10473 &gCamCapability[cameraId]->padding_info, &buf_planes);
10474
10475 if (rc == 0) {
10476 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10477 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10478 opaque_size.add(buf_planes.plane_info.frame_len);
10479                 } else {
10480 LOGE("raw frame calculation failed!");
10481 }
10482 }
10483 }
10484 }
10485
10486 if ((opaque_size.size() > 0) &&
10487 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10488 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10489 else
10490         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10491#endif
10492
Thierry Strudel04e026f2016-10-10 11:27:36 -070010493 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10494 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10495 size = 0;
10496 count = CAM_IR_MODE_MAX;
10497 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10498 for (size_t i = 0; i < count; i++) {
10499 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10500 gCamCapability[cameraId]->supported_ir_modes[i]);
10501 if (NAME_NOT_FOUND != val) {
10502 avail_ir_modes[size] = (int32_t)val;
10503 size++;
10504 }
10505 }
10506 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10507 avail_ir_modes, size);
10508 }
10509
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010510 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10511 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10512 size = 0;
10513 count = CAM_AEC_CONVERGENCE_MAX;
10514 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10515 for (size_t i = 0; i < count; i++) {
10516 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10517 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10518 if (NAME_NOT_FOUND != val) {
10519 available_instant_aec_modes[size] = (int32_t)val;
10520 size++;
10521 }
10522 }
10523 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10524 available_instant_aec_modes, size);
10525 }
10526
Thierry Strudel54dc9782017-02-15 12:12:10 -080010527 int32_t sharpness_range[] = {
10528 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10529 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10530 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10531
10532 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10533 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10534 size = 0;
10535 count = CAM_BINNING_CORRECTION_MODE_MAX;
10536 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10537 for (size_t i = 0; i < count; i++) {
10538 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10539 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10540 gCamCapability[cameraId]->supported_binning_modes[i]);
10541 if (NAME_NOT_FOUND != val) {
10542 avail_binning_modes[size] = (int32_t)val;
10543 size++;
10544 }
10545 }
10546 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10547 avail_binning_modes, size);
10548 }
10549
10550 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10551 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10552 size = 0;
10553 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10554 for (size_t i = 0; i < count; i++) {
10555 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10556 gCamCapability[cameraId]->supported_aec_modes[i]);
10557 if (NAME_NOT_FOUND != val)
10558 available_aec_modes[size++] = val;
10559 }
10560 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10561 available_aec_modes, size);
10562 }
10563
10564 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10565 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10566 size = 0;
10567 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10568 for (size_t i = 0; i < count; i++) {
10569 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10570 gCamCapability[cameraId]->supported_iso_modes[i]);
10571 if (NAME_NOT_FOUND != val)
10572 available_iso_modes[size++] = val;
10573 }
10574 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10575 available_iso_modes, size);
10576 }
10577
10578 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010579 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010580 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10581 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10582 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10583
10584 int32_t available_saturation_range[4];
10585 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10586 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10587 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10588 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10589 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10590 available_saturation_range, 4);
10591
10592 uint8_t is_hdr_values[2];
10593 is_hdr_values[0] = 0;
10594 is_hdr_values[1] = 1;
10595 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10596 is_hdr_values, 2);
10597
10598 float is_hdr_confidence_range[2];
10599 is_hdr_confidence_range[0] = 0.0;
10600 is_hdr_confidence_range[1] = 1.0;
10601 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10602 is_hdr_confidence_range, 2);
10603
Emilian Peev0a972ef2017-03-16 10:25:53 +000010604 size_t eepromLength = strnlen(
10605 reinterpret_cast<const char *>(
10606 gCamCapability[cameraId]->eeprom_version_info),
10607 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10608 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010609 char easelInfo[] = ",E:N";
10610 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10611 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10612 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010613 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10614 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010615 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010616 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010617 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10618 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10619 }
10620
Thierry Strudel3d639192016-09-09 11:52:26 -070010621 gStaticMetadata[cameraId] = staticInfo.release();
10622 return rc;
10623}
10624
10625/*===========================================================================
10626 * FUNCTION : makeTable
10627 *
10628 * DESCRIPTION: make a table of sizes
10629 *
10630 * PARAMETERS : @dimTable / @size : input dimension table and its valid count
10631 *              @max_size : maximum number of entries to copy
10632 *              @sizeTable : output array, flattened as (width, height) pairs
10633 *==========================================================================*/
10634void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10635 size_t max_size, int32_t *sizeTable)
10636{
10637 size_t j = 0;
10638 if (size > max_size) {
10639 size = max_size;
10640 }
10641 for (size_t i = 0; i < size; i++) {
10642 sizeTable[j] = dimTable[i].width;
10643 sizeTable[j+1] = dimTable[i].height;
10644 j+=2;
10645 }
10646}
10647
10648/*===========================================================================
10649 * FUNCTION : makeFPSTable
10650 *
10651 * DESCRIPTION: make a table of fps ranges
10652 *
10653 * PARAMETERS : @fpsTable / @size : input fps range table and its valid count
10654 *              @max_size / @fpsRangesTable : max entries to copy and the flattened (min_fps, max_fps) output
10655 *==========================================================================*/
10656void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10657 size_t max_size, int32_t *fpsRangesTable)
10658{
10659 size_t j = 0;
10660 if (size > max_size) {
10661 size = max_size;
10662 }
10663 for (size_t i = 0; i < size; i++) {
10664 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10665 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10666 j+=2;
10667 }
10668}
10669
10670/*===========================================================================
10671 * FUNCTION : makeOverridesList
10672 *
10673 * DESCRIPTION: make a list of scene mode overrides
10674 *
10675 * PARAMETERS : @overridesTable / @size / @max_size : backend override table and entry limits
10676 *              @overridesList : output, one (ae, awb, af) triple per fwk scene mode
10677 *              @supported_indexes / @camera_id : fwk-visible scene mode indices and camera id
10678 *==========================================================================*/
10679void QCamera3HardwareInterface::makeOverridesList(
10680 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10681 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10682{
10683 /*daemon will give a list of overrides for all scene modes.
10684 However we should send the fwk only the overrides for the scene modes
10685 supported by the framework*/
10686 size_t j = 0;
10687 if (size > max_size) {
10688 size = max_size;
10689 }
10690 size_t focus_count = CAM_FOCUS_MODE_MAX;
10691 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10692 focus_count);
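    // overridesList is filled with one (ae_mode, awb_mode, af_mode) triple per
    // framework-visible scene mode, hence the stride of 3 in the loop below.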
10693 for (size_t i = 0; i < size; i++) {
10694 bool supt = false;
10695 size_t index = supported_indexes[i];
10696 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10697 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10698 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10699 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10700 overridesTable[index].awb_mode);
10701 if (NAME_NOT_FOUND != val) {
10702 overridesList[j+1] = (uint8_t)val;
10703 }
10704 uint8_t focus_override = overridesTable[index].af_mode;
10705 for (size_t k = 0; k < focus_count; k++) {
10706 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10707 supt = true;
10708 break;
10709 }
10710 }
10711 if (supt) {
10712 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10713 focus_override);
10714 if (NAME_NOT_FOUND != val) {
10715 overridesList[j+2] = (uint8_t)val;
10716 }
10717 } else {
10718 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10719 }
10720 j+=3;
10721 }
10722}
10723
10724/*===========================================================================
10725 * FUNCTION : filterJpegSizes
10726 *
10727 * DESCRIPTION: Returns the processed sizes that qualify as JPEG sizes, i.e.
10728 *              those that the active array resolution can be downscaled to
10729 *              within the given maximum downscale factor
10730 * PARAMETERS : input/output flattened size arrays and their limits, the active
10731 *              array rectangle, and the maximum downscale factor
10732 * RETURN : length of jpegSizes array
10733 *==========================================================================*/
10734
10735size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10736 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10737 uint8_t downscale_factor)
10738{
10739 if (0 == downscale_factor) {
10740 downscale_factor = 1;
10741 }
10742
10743 int32_t min_width = active_array_size.width / downscale_factor;
10744 int32_t min_height = active_array_size.height / downscale_factor;
10745 size_t jpegSizesCnt = 0;
10746 if (processedSizesCnt > maxCount) {
10747 processedSizesCnt = maxCount;
10748 }
10749 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10750 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10751 jpegSizes[jpegSizesCnt] = processedSizes[i];
10752 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10753 jpegSizesCnt += 2;
10754 }
10755 }
10756 return jpegSizesCnt;
10757}
10758
10759/*===========================================================================
10760 * FUNCTION : computeNoiseModelEntryS
10761 *
10762 * DESCRIPTION: function to map a given sensitivity to the S noise
10763 * model parameters in the DNG noise model.
10764 *
10765 * PARAMETERS : sens : the sensor sensitivity
10766 *
10767 * RETURN : S (sensor amplification) noise
10768 *
10769 *==========================================================================*/
10770double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10771 double s = gCamCapability[mCameraId]->gradient_S * sens +
10772 gCamCapability[mCameraId]->offset_S;
10773 return ((s < 0.0) ? 0.0 : s);
10774}
10775
10776/*===========================================================================
10777 * FUNCTION : computeNoiseModelEntryO
10778 *
10779 * DESCRIPTION: function to map a given sensitivity to the O noise
10780 * model parameters in the DNG noise model.
10781 *
10782 * PARAMETERS : sens : the sensor sensitivity
10783 *
10784 * RETURN : O (sensor readout) noise
10785 *
10786 *==========================================================================*/
10787double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10788 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10789 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10790 1.0 : (1.0 * sens / max_analog_sens);
10791 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10792 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10793 return ((o < 0.0) ? 0.0 : o);
10794}
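// Illustrative sketch (not part of the HAL flow): the S and O values computed above
// are the per-sensitivity coefficients of the DNG noise model, where the noise of a
// pixel value x is modeled with variance roughly S * x + O (std dev sqrt(S*x + O)).
// Assuming a hypothetical sensitivity `sens`, one ANDROID_SENSOR_NOISE_PROFILE entry
// could be derived roughly as follows:
//
//     double noise_profile_S = computeNoiseModelEntryS(sens);
//     double noise_profile_O = computeNoiseModelEntryO(sens);
//     double noise_profile[2] = { noise_profile_S, noise_profile_O }; // one CFA channel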
10795
10796/*===========================================================================
10797 * FUNCTION : getSensorSensitivity
10798 *
10799 * DESCRIPTION: convert iso_mode to an integer value
10800 *
10801 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10802 *
10803 * RETURN : sensitivity supported by sensor
10804 *
10805 *==========================================================================*/
10806int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10807{
10808 int32_t sensitivity;
10809
10810 switch (iso_mode) {
10811 case CAM_ISO_MODE_100:
10812 sensitivity = 100;
10813 break;
10814 case CAM_ISO_MODE_200:
10815 sensitivity = 200;
10816 break;
10817 case CAM_ISO_MODE_400:
10818 sensitivity = 400;
10819 break;
10820 case CAM_ISO_MODE_800:
10821 sensitivity = 800;
10822 break;
10823 case CAM_ISO_MODE_1600:
10824 sensitivity = 1600;
10825 break;
10826 default:
10827 sensitivity = -1;
10828 break;
10829 }
10830 return sensitivity;
10831}
10832
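/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: create the Easel manager client if needed and, when Easel is
 *              present (and not explicitly kept powered off for HDR+ testing),
 *              open it, suspend it immediately, and initialize the HDR+ related
 *              globals (gEaselBypassOnly, gEaselProfilingEnabled,
 *              gExposeEnableZslKey)
 *
 * PARAMETERS : none (gHdrPlusClientLock is expected to be held by the caller)
 *
 * RETURN     : OK on success, negative error code otherwise
 *==========================================================================*/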
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010833int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010834 if (gEaselManagerClient == nullptr) {
10835 gEaselManagerClient = EaselManagerClient::create();
10836 if (gEaselManagerClient == nullptr) {
10837 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10838 return -ENODEV;
10839 }
10840 }
10841
10842 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010843 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10844 // to connect to Easel.
10845 bool doNotpowerOnEasel =
10846 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10847
10848 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010849 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10850 return OK;
10851 }
10852
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010853 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010854 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010855 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010856 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010857 return res;
10858 }
10859
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010860 EaselManagerClientOpened = true;
10861
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010862 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010863 if (res != OK) {
10864 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10865 }
10866
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010867 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010868 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010869
10870 // Expose enableZsl key only when HDR+ mode is enabled.
10871 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010872 }
10873
10874 return OK;
10875}
10876
Thierry Strudel3d639192016-09-09 11:52:26 -070010877/*===========================================================================
10878 * FUNCTION : getCamInfo
10879 *
10880 * DESCRIPTION: query camera capabilities
10881 *
10882 * PARAMETERS :
10883 * @cameraId : camera Id
10884 * @info : camera info struct to be filled in with camera capabilities
10885 *
10886 * RETURN : int type of status
10887 * NO_ERROR -- success
10888 * none-zero failure code
10889 *              non-zero failure code
10890int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10891 struct camera_info *info)
10892{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010893 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010894 int rc = 0;
10895
10896 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010897
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010898 {
10899 Mutex::Autolock l(gHdrPlusClientLock);
10900 rc = initHdrPlusClientLocked();
10901 if (rc != OK) {
10902 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10903 pthread_mutex_unlock(&gCamLock);
10904 return rc;
10905 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010906 }
10907
Thierry Strudel3d639192016-09-09 11:52:26 -070010908 if (NULL == gCamCapability[cameraId]) {
10909 rc = initCapabilities(cameraId);
10910 if (rc < 0) {
10911 pthread_mutex_unlock(&gCamLock);
10912 return rc;
10913 }
10914 }
10915
10916 if (NULL == gStaticMetadata[cameraId]) {
10917 rc = initStaticMetadata(cameraId);
10918 if (rc < 0) {
10919 pthread_mutex_unlock(&gCamLock);
10920 return rc;
10921 }
10922 }
10923
10924 switch(gCamCapability[cameraId]->position) {
10925 case CAM_POSITION_BACK:
10926 case CAM_POSITION_BACK_AUX:
10927 info->facing = CAMERA_FACING_BACK;
10928 break;
10929
10930 case CAM_POSITION_FRONT:
10931 case CAM_POSITION_FRONT_AUX:
10932 info->facing = CAMERA_FACING_FRONT;
10933 break;
10934
10935 default:
10936 LOGE("Unknown position type %d for camera id:%d",
10937 gCamCapability[cameraId]->position, cameraId);
10938 rc = -1;
10939 break;
10940 }
10941
10942
10943 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010944#ifndef USE_HAL_3_3
10945 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10946#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010947 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010948#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010949 info->static_camera_characteristics = gStaticMetadata[cameraId];
10950
10951 //For now assume both cameras can operate independently.
10952 info->conflicting_devices = NULL;
10953 info->conflicting_devices_length = 0;
10954
10955 //resource cost is 100 * MIN(1.0, m/M),
10956 //where m is throughput requirement with maximum stream configuration
10957 //and M is CPP maximum throughput.
10958 float max_fps = 0.0;
10959 for (uint32_t i = 0;
10960 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10961 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10962 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10963 }
10964 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10965 gCamCapability[cameraId]->active_array_size.width *
10966 gCamCapability[cameraId]->active_array_size.height * max_fps /
10967 gCamCapability[cameraId]->max_pixel_bandwidth;
10968 info->resource_cost = 100 * MIN(1.0, ratio);
10969 LOGI("camera %d resource cost is %d", cameraId,
10970 info->resource_cost);
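    // Note: resource_cost is clamped to [0, 100]; the framework is expected to
    // budget concurrently opened cameras against this cost together with the
    // conflicting_devices list (see camera_common.h for the exact policy).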
10971
10972 pthread_mutex_unlock(&gCamLock);
10973 return rc;
10974}
10975
10976/*===========================================================================
10977 * FUNCTION : translateCapabilityToMetadata
10978 *
10979 * DESCRIPTION: translate the capability into camera_metadata_t
10980 *
10981 * PARAMETERS : type of the request
10982 * PARAMETERS : @type : capture request template type (CAMERA3_TEMPLATE_*)
10983 *
10984 * RETURN : success: camera_metadata_t*
10985 * failure: NULL
10986 *
10987 *==========================================================================*/
10988camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10989{
10990 if (mDefaultMetadata[type] != NULL) {
10991 return mDefaultMetadata[type];
10992 }
10993 //first time we are handling this request
10994 //fill up the metadata structure using the wrapper class
10995 CameraMetadata settings;
10996 //translate from cam_capability_t to camera_metadata_tag_t
10997 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10998 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10999 int32_t defaultRequestID = 0;
11000 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11001
11002 /* OIS disable */
11003 char ois_prop[PROPERTY_VALUE_MAX];
11004 memset(ois_prop, 0, sizeof(ois_prop));
11005 property_get("persist.camera.ois.disable", ois_prop, "0");
11006 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11007
11008 /* Force video to use OIS */
11009 char videoOisProp[PROPERTY_VALUE_MAX];
11010 memset(videoOisProp, 0, sizeof(videoOisProp));
11011 property_get("persist.camera.ois.video", videoOisProp, "1");
11012 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011013
11014 // Hybrid AE enable/disable
11015 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11016 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11017 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11018 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11019
Thierry Strudel3d639192016-09-09 11:52:26 -070011020 uint8_t controlIntent = 0;
11021 uint8_t focusMode;
11022 uint8_t vsMode;
11023 uint8_t optStabMode;
11024 uint8_t cacMode;
11025 uint8_t edge_mode;
11026 uint8_t noise_red_mode;
11027 uint8_t tonemap_mode;
11028 bool highQualityModeEntryAvailable = FALSE;
11029 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011030 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011031 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11032 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011033 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011034 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011035 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011036
Thierry Strudel3d639192016-09-09 11:52:26 -070011037 switch (type) {
11038 case CAMERA3_TEMPLATE_PREVIEW:
11039 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11040 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11041 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11042 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11043 edge_mode = ANDROID_EDGE_MODE_FAST;
11044 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11045 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11046 break;
11047 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11048 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11049 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11050 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11051 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11052 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11053 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11054 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11055 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11056 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11057 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11058 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11059 highQualityModeEntryAvailable = TRUE;
11060 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11061 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11062 fastModeEntryAvailable = TRUE;
11063 }
11064 }
11065 if (highQualityModeEntryAvailable) {
11066 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11067 } else if (fastModeEntryAvailable) {
11068 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11069 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011070 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11071 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11072 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011073 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011074 break;
11075 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11076 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11077 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11078 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011079 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11080 edge_mode = ANDROID_EDGE_MODE_FAST;
11081 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11082 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11083 if (forceVideoOis)
11084 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11085 break;
11086 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11087 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11088 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11089 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011090 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11091 edge_mode = ANDROID_EDGE_MODE_FAST;
11092 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11093 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11094 if (forceVideoOis)
11095 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11096 break;
11097 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11098 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11099 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11100 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11101 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11102 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11103 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11104 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11105 break;
11106 case CAMERA3_TEMPLATE_MANUAL:
11107 edge_mode = ANDROID_EDGE_MODE_FAST;
11108 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11109 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11110 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11111 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11112 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11113 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11114 break;
11115 default:
11116 edge_mode = ANDROID_EDGE_MODE_FAST;
11117 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11118 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11119 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11120 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11121 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11122 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11123 break;
11124 }
11125     // Set CAC to OFF if the underlying device doesn't support it
11126 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11127 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11128 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011129 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11130 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11131 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11132 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11133 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11134 }
11135 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011136 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011137 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011138
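    // If the sensor advertises exactly one OIS mode, force that mode; otherwise the
    // persist.camera.ois.disable property, when set, forces OIS off.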
11139 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11140 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11141 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11142 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11143 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11144 || ois_disable)
11145 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11146 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011147 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011148
11149 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11150 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11151
11152 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11153 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11154
11155 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11156 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11157
11158 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11159 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11160
11161 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11162 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11163
11164 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11165 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11166
11167 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11168 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11169
11170 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11171 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11172
11173 /*flash*/
11174 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11175 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11176
11177 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11178 settings.update(ANDROID_FLASH_FIRING_POWER,
11179 &flashFiringLevel, 1);
11180
11181 /* lens */
11182 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11183 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11184
11185 if (gCamCapability[mCameraId]->filter_densities_count) {
11186 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11187 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11188 gCamCapability[mCameraId]->filter_densities_count);
11189 }
11190
11191 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11192 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11193
Thierry Strudel3d639192016-09-09 11:52:26 -070011194 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11195 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11196
11197 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11198 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11199
11200 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11201 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11202
11203 /* face detection (default to OFF) */
11204 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11205 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11206
Thierry Strudel54dc9782017-02-15 12:12:10 -080011207 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11208 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011209
11210 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11211 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11212
11213 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11214 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11215
Thierry Strudel3d639192016-09-09 11:52:26 -070011216
11217 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11218 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11219
11220 /* Exposure time(Update the Min Exposure Time)*/
11221     /* Exposure time (default to the minimum supported exposure time) */
11222 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11223
11224 /* frame duration */
11225 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11226 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11227
11228 /* sensitivity */
11229 static const int32_t default_sensitivity = 100;
11230 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011231#ifndef USE_HAL_3_3
11232 static const int32_t default_isp_sensitivity =
11233 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11234 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11235#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011236
11237 /*edge mode*/
11238 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11239
11240 /*noise reduction mode*/
11241 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11242
11243 /*color correction mode*/
11244 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11245 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11246
11247 /*transform matrix mode*/
11248 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11249
11250 int32_t scaler_crop_region[4];
11251 scaler_crop_region[0] = 0;
11252 scaler_crop_region[1] = 0;
11253 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11254 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11255 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11256
11257 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11258 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11259
11260 /*focus distance*/
11261 float focus_distance = 0.0;
11262 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11263
11264 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011265 /* Restrict template max_fps to 30 */
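    // Selection below: ranges whose max_fps exceeds TEMPLATE_MAX_PREVIEW_FPS are
    // skipped; preview/still/ZSL templates take the widest remaining range, while
    // the other (video) templates take the highest fixed (min == max) range.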
Thierry Strudel3d639192016-09-09 11:52:26 -070011266 float max_range = 0.0;
11267 float max_fixed_fps = 0.0;
11268 int32_t fps_range[2] = {0, 0};
11269 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11270 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011271 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11272 TEMPLATE_MAX_PREVIEW_FPS) {
11273 continue;
11274 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011275 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11276 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11277 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11278 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11279 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11280 if (range > max_range) {
11281 fps_range[0] =
11282 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11283 fps_range[1] =
11284 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11285 max_range = range;
11286 }
11287 } else {
11288 if (range < 0.01 && max_fixed_fps <
11289 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11290 fps_range[0] =
11291 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11292 fps_range[1] =
11293 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11294 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11295 }
11296 }
11297 }
11298 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11299
11300 /*precapture trigger*/
11301 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11302 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11303
11304 /*af trigger*/
11305 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11306 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11307
11308 /* ae & af regions */
11309 int32_t active_region[] = {
11310 gCamCapability[mCameraId]->active_array_size.left,
11311 gCamCapability[mCameraId]->active_array_size.top,
11312 gCamCapability[mCameraId]->active_array_size.left +
11313 gCamCapability[mCameraId]->active_array_size.width,
11314 gCamCapability[mCameraId]->active_array_size.top +
11315 gCamCapability[mCameraId]->active_array_size.height,
11316 0};
11317 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11318 sizeof(active_region) / sizeof(active_region[0]));
11319 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11320 sizeof(active_region) / sizeof(active_region[0]));
11321
11322 /* black level lock */
11323 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11324 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11325
Thierry Strudel3d639192016-09-09 11:52:26 -070011326 //special defaults for manual template
11327 if (type == CAMERA3_TEMPLATE_MANUAL) {
11328 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11329 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11330
11331 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11332 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11333
11334 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11335 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11336
11337 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11338 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11339
11340 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11341 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11342
11343 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11344 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11345 }
11346
11347
11348 /* TNR
11349      * This is where we decide for which templates TNR is enabled.
11350      * TNR is turned on if either the preview or the video stream requires it.
11351      * This is not to be confused with per-stream linking; that decision is
11352      * still made per session and is handled as part of stream configuration.
11353 */
11354 uint8_t tnr_enable = 0;
11355
11356 if (m_bTnrPreview || m_bTnrVideo) {
11357
11358 switch (type) {
11359 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11360 tnr_enable = 1;
11361 break;
11362
11363 default:
11364 tnr_enable = 0;
11365 break;
11366 }
11367
11368 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11369 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11370 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11371
11372 LOGD("TNR:%d with process plate %d for template:%d",
11373 tnr_enable, tnr_process_type, type);
11374 }
11375
11376 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011377 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011378 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11379
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011380 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011381 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11382
Shuzhen Wang920ea402017-05-03 08:49:39 -070011383 uint8_t related_camera_id = mCameraId;
11384 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011385
11386 /* CDS default */
11387 char prop[PROPERTY_VALUE_MAX];
11388 memset(prop, 0, sizeof(prop));
11389 property_get("persist.camera.CDS", prop, "Auto");
11390 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11391 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11392 if (CAM_CDS_MODE_MAX == cds_mode) {
11393 cds_mode = CAM_CDS_MODE_AUTO;
11394 }
11395
11396 /* Disabling CDS in templates which have TNR enabled*/
11397 if (tnr_enable)
11398 cds_mode = CAM_CDS_MODE_OFF;
11399
11400 int32_t mode = cds_mode;
11401 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011402
Thierry Strudel269c81a2016-10-12 12:13:59 -070011403 /* Manual Convergence AEC Speed is disabled by default*/
11404 float default_aec_speed = 0;
11405 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11406
11407 /* Manual Convergence AWB Speed is disabled by default*/
11408 float default_awb_speed = 0;
11409 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11410
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011411 // Set instant AEC to normal convergence by default
11412 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11413 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11414
Shuzhen Wang19463d72016-03-08 11:09:52 -080011415 /* hybrid ae */
11416 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11417
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011418 if (gExposeEnableZslKey) {
11419 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11420 }
11421
Thierry Strudel3d639192016-09-09 11:52:26 -070011422 mDefaultMetadata[type] = settings.release();
11423
11424 return mDefaultMetadata[type];
11425}
11426
11427/*===========================================================================
11428 * FUNCTION : setFrameParameters
11429 *
11430 * DESCRIPTION: set parameters per frame as requested in the metadata from
11431 * framework
11432 *
11433 * PARAMETERS :
11434 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011435 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011436 * @blob_request: Whether this request is a blob request or not
11437 *
11438 * RETURN : success: NO_ERROR
11439 *              failure: non-zero error code (e.g. BAD_VALUE)
11440 *==========================================================================*/
11441int QCamera3HardwareInterface::setFrameParameters(
11442 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011443 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011444 int blob_request,
11445 uint32_t snapshotStreamId)
11446{
11447 /*translate from camera_metadata_t type to parm_type_t*/
11448 int rc = 0;
11449 int32_t hal_version = CAM_HAL_V3;
11450
11451 clear_metadata_buffer(mParameters);
11452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11453 LOGE("Failed to set hal version in the parameters");
11454 return BAD_VALUE;
11455 }
11456
11457 /*we need to update the frame number in the parameters*/
11458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11459 request->frame_number)) {
11460 LOGE("Failed to set the frame number in the parameters");
11461 return BAD_VALUE;
11462 }
11463
11464 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011465 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011466 LOGE("Failed to set stream type mask in the parameters");
11467 return BAD_VALUE;
11468 }
11469
11470 if (mUpdateDebugLevel) {
11471 uint32_t dummyDebugLevel = 0;
11472         /* The value of dummyDebugLevel is irrelevant. On
11473          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11474 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11475 dummyDebugLevel)) {
11476 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11477 return BAD_VALUE;
11478 }
11479 mUpdateDebugLevel = false;
11480 }
11481
11482 if(request->settings != NULL){
11483 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11484 if (blob_request)
11485 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11486 }
11487
11488 return rc;
11489}
11490
11491/*===========================================================================
11492 * FUNCTION : setReprocParameters
11493 *
11494 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11495 * return it.
11496 *
11497 * PARAMETERS :
11498 * @request : request that needs to be serviced
11499 *
11500 * RETURN : success: NO_ERROR
11501 *              failure: non-zero error code (e.g. BAD_VALUE)
11502 *==========================================================================*/
11503int32_t QCamera3HardwareInterface::setReprocParameters(
11504 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11505 uint32_t snapshotStreamId)
11506{
11507 /*translate from camera_metadata_t type to parm_type_t*/
11508 int rc = 0;
11509
11510 if (NULL == request->settings){
11511 LOGE("Reprocess settings cannot be NULL");
11512 return BAD_VALUE;
11513 }
11514
11515 if (NULL == reprocParam) {
11516 LOGE("Invalid reprocessing metadata buffer");
11517 return BAD_VALUE;
11518 }
11519 clear_metadata_buffer(reprocParam);
11520
11521 /*we need to update the frame number in the parameters*/
11522 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11523 request->frame_number)) {
11524 LOGE("Failed to set the frame number in the parameters");
11525 return BAD_VALUE;
11526 }
11527
11528 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11529 if (rc < 0) {
11530 LOGE("Failed to translate reproc request");
11531 return rc;
11532 }
11533
11534 CameraMetadata frame_settings;
11535 frame_settings = request->settings;
11536 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11537 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11538 int32_t *crop_count =
11539 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11540 int32_t *crop_data =
11541 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11542 int32_t *roi_map =
11543 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11544 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11545 cam_crop_data_t crop_meta;
11546 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11547 crop_meta.num_of_streams = 1;
11548 crop_meta.crop_info[0].crop.left = crop_data[0];
11549 crop_meta.crop_info[0].crop.top = crop_data[1];
11550 crop_meta.crop_info[0].crop.width = crop_data[2];
11551 crop_meta.crop_info[0].crop.height = crop_data[3];
11552
11553 crop_meta.crop_info[0].roi_map.left =
11554 roi_map[0];
11555 crop_meta.crop_info[0].roi_map.top =
11556 roi_map[1];
11557 crop_meta.crop_info[0].roi_map.width =
11558 roi_map[2];
11559 crop_meta.crop_info[0].roi_map.height =
11560 roi_map[3];
11561
11562 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11563 rc = BAD_VALUE;
11564 }
11565        LOGD("Found reprocess crop data for stream %p: crop (%d, %d) %dx%d",
11566 request->input_buffer->stream,
11567 crop_meta.crop_info[0].crop.left,
11568 crop_meta.crop_info[0].crop.top,
11569 crop_meta.crop_info[0].crop.width,
11570 crop_meta.crop_info[0].crop.height);
11571        LOGD("Found reprocess roi map data for stream %p: roi (%d, %d) %dx%d",
11572 request->input_buffer->stream,
11573 crop_meta.crop_info[0].roi_map.left,
11574 crop_meta.crop_info[0].roi_map.top,
11575 crop_meta.crop_info[0].roi_map.width,
11576 crop_meta.crop_info[0].roi_map.height);
11577 } else {
11578 LOGE("Invalid reprocess crop count %d!", *crop_count);
11579 }
11580 } else {
11581 LOGE("No crop data from matching output stream");
11582 }
11583
11584    /* These settings are not needed for regular requests, so handle them specially for
11585       reprocess requests; they carry information needed for the EXIF tags. */
11586 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11587 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11588 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11589 if (NAME_NOT_FOUND != val) {
11590 uint32_t flashMode = (uint32_t)val;
11591 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11592 rc = BAD_VALUE;
11593 }
11594 } else {
11595 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11596 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11597 }
11598 } else {
11599 LOGH("No flash mode in reprocess settings");
11600 }
11601
11602 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11603 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11604 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11605 rc = BAD_VALUE;
11606 }
11607 } else {
11608 LOGH("No flash state in reprocess settings");
11609 }
11610
11611 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11612 uint8_t *reprocessFlags =
11613 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11615 *reprocessFlags)) {
11616 rc = BAD_VALUE;
11617 }
11618 }
11619
Thierry Strudel54dc9782017-02-15 12:12:10 -080011620 // Add exif debug data to internal metadata
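    // The debug-data blob packs per-module payloads (AE, AWB, AF, ASD, stats, BE stats,
    // BHIST, 3A tuning); only the modules flagged valid are copied into the reprocess metadata.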
11621 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11622 mm_jpeg_debug_exif_params_t *debug_params =
11623 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11624 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11625 // AE
11626 if (debug_params->ae_debug_params_valid == TRUE) {
11627 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11628 debug_params->ae_debug_params);
11629 }
11630 // AWB
11631 if (debug_params->awb_debug_params_valid == TRUE) {
11632 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11633 debug_params->awb_debug_params);
11634 }
11635 // AF
11636 if (debug_params->af_debug_params_valid == TRUE) {
11637 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11638 debug_params->af_debug_params);
11639 }
11640 // ASD
11641 if (debug_params->asd_debug_params_valid == TRUE) {
11642 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11643 debug_params->asd_debug_params);
11644 }
11645 // Stats
11646 if (debug_params->stats_debug_params_valid == TRUE) {
11647 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11648 debug_params->stats_debug_params);
11649 }
11650 // BE Stats
11651 if (debug_params->bestats_debug_params_valid == TRUE) {
11652 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11653 debug_params->bestats_debug_params);
11654 }
11655 // BHIST
11656 if (debug_params->bhist_debug_params_valid == TRUE) {
11657 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11658 debug_params->bhist_debug_params);
11659 }
11660 // 3A Tuning
11661 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11662 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11663 debug_params->q3a_tuning_debug_params);
11664 }
11665 }
11666
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011667 // Add metadata which reprocess needs
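    // The reprocess data blob restores snapshot-time state (sensor/CAMIF/ISP/CPP crop info,
    // AF focal length ratio, pipeline flip, AF ROI and the dynamic feature mask) so that
    // offline reprocessing matches the original capture.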
11668 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11669 cam_reprocess_info_t *repro_info =
11670 (cam_reprocess_info_t *)frame_settings.find
11671 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011672 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011673 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011674 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011675 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011676 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011677 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011678 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011679 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011680 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011681 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011682 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011683 repro_info->pipeline_flip);
11684 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11685 repro_info->af_roi);
11686 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11687 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011688        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11689            CAM_INTF_PARM_ROTATION has already been added in
11690            translateToHalMetadata and the HAL must keep that new rotation
11691            metadata. Otherwise, fall back to the rotation info saved in the
11692            vendor tag. */
11693 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11694 CAM_INTF_PARM_ROTATION, reprocParam) {
11695 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11696 } else {
11697 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011698 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011699 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011700 }
11701
11702    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11703       to request cropping, and the ROI is used for downscale/upscale during HW JPEG encoding;
11704       roi.width and roi.height give the final JPEG size.
11705       For now, the HAL only honors this for reprocess requests. */
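    /* Illustrative only (not exercised by the HAL itself): a client could request this
     * crop through the vendor tags roughly as follows, where "settings" is the request's
     * CameraMetadata and the sizes are made up:
     *     uint8_t enable  = TRUE;
     *     int32_t rect[4] = {0, 0, 3840, 2160};   // left, top, width, height to crop
     *     int32_t roi[4]  = {0, 0, 1920, 1080};   // roi[2]/roi[3] = final JPEG size
     *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
     *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, rect, 4);
     *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, roi, 4);
     */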
11706 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11707 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11708 uint8_t *enable =
11709 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11710 if (*enable == TRUE) {
11711 int32_t *crop_data =
11712 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11713 cam_stream_crop_info_t crop_meta;
11714 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11715 crop_meta.stream_id = 0;
11716 crop_meta.crop.left = crop_data[0];
11717 crop_meta.crop.top = crop_data[1];
11718 crop_meta.crop.width = crop_data[2];
11719 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011720 // The JPEG crop roi should match cpp output size
11721 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11722 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11723 crop_meta.roi_map.left = 0;
11724 crop_meta.roi_map.top = 0;
11725 crop_meta.roi_map.width = cpp_crop->crop.width;
11726 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011727 }
11728 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11729 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011730 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011731 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011732 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11733 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011734 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011735 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11736
11737 // Add JPEG scale information
11738 cam_dimension_t scale_dim;
11739 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11740 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11741 int32_t *roi =
11742 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11743 scale_dim.width = roi[2];
11744 scale_dim.height = roi[3];
11745 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11746 scale_dim);
11747 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11748 scale_dim.width, scale_dim.height, mCameraId);
11749 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011750 }
11751 }
11752
11753 return rc;
11754}
11755
11756/*===========================================================================
11757 * FUNCTION : saveRequestSettings
11758 *
11759 * DESCRIPTION: Merge any settings the HAL may have modified (e.g. the JPEG thumbnail
11760 *              size) back into the request settings and save them to be applied on the frame
11761 *
11762 * PARAMETERS :
11763 * @jpegMetadata : the extracted and/or modified jpeg metadata
11764 * @request : request with initial settings
11765 *
11766 * RETURN :
11767 * camera_metadata_t* : pointer to the saved request settings
11768 *==========================================================================*/
11769camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11770 const CameraMetadata &jpegMetadata,
11771 camera3_capture_request_t *request)
11772{
11773 camera_metadata_t *resultMetadata;
11774 CameraMetadata camMetadata;
11775 camMetadata = request->settings;
11776
11777 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11778 int32_t thumbnail_size[2];
11779 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11780 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11781 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11782 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11783 }
11784
11785 if (request->input_buffer != NULL) {
11786 uint8_t reprocessFlags = 1;
11787 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11788 (uint8_t*)&reprocessFlags,
11789 sizeof(reprocessFlags));
11790 }
11791
11792 resultMetadata = camMetadata.release();
11793 return resultMetadata;
11794}
11795
11796/*===========================================================================
11797 * FUNCTION : setHalFpsRange
11798 *
11799 * DESCRIPTION: Set the FPS range and HFR/batch-mode parameters derived from the AE target FPS range
11800 *
11801 *
11802 * PARAMETERS :
11803 * @settings : Metadata from framework
11804 * @hal_metadata: Metadata buffer
11805 *
11806 *
11807 * RETURN : success: NO_ERROR
11808 *              failure: BAD_VALUE
11809 *==========================================================================*/
11810int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11811 metadata_buffer_t *hal_metadata)
11812{
11813 int32_t rc = NO_ERROR;
11814 cam_fps_range_t fps_range;
11815 fps_range.min_fps = (float)
11816 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11817 fps_range.max_fps = (float)
11818 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11819 fps_range.video_min_fps = fps_range.min_fps;
11820 fps_range.video_max_fps = fps_range.max_fps;
11821
11822 LOGD("aeTargetFpsRange fps: [%f %f]",
11823 fps_range.min_fps, fps_range.max_fps);
11824 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11825 * follows:
11826 * ---------------------------------------------------------------|
11827 * Video stream is absent in configure_streams |
11828 * (Camcorder preview before the first video record |
11829 * ---------------------------------------------------------------|
11830 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11831 * | | | vid_min/max_fps|
11832 * ---------------------------------------------------------------|
11833 * NO | [ 30, 240] | 240 | [240, 240] |
11834 * |-------------|-------------|----------------|
11835 * | [240, 240] | 240 | [240, 240] |
11836 * ---------------------------------------------------------------|
11837 * Video stream is present in configure_streams |
11838 * ---------------------------------------------------------------|
11839 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11840 * | | | vid_min/max_fps|
11841 * ---------------------------------------------------------------|
11842 * NO | [ 30, 240] | 240 | [240, 240] |
11843 * (camcorder prev |-------------|-------------|----------------|
11844 * after video rec | [240, 240] | 240 | [240, 240] |
11845 * is stopped) | | | |
11846 * ---------------------------------------------------------------|
11847 * YES | [ 30, 240] | 240 | [240, 240] |
11848 * |-------------|-------------|----------------|
11849 * | [240, 240] | 240 | [240, 240] |
11850 * ---------------------------------------------------------------|
11851 * When Video stream is absent in configure_streams,
11852 * preview fps = sensor_fps / batchsize
11853 * Eg: for 240fps at batchSize 4, preview = 60fps
11854 * for 120fps at batchSize 4, preview = 30fps
11855 *
11856 * When video stream is present in configure_streams, preview fps is as per
11857 * the ratio of preview buffers to video buffers requested in process
11858 * capture request
11859 */
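    /* Illustrative only: requests typically reach the CONSTRAINED_HIGH_SPEED branch below
     * when the streams were configured with
     * CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE and the request carries, e.g.:
     *     int32_t fpsRange[2] = {240, 240};   // hypothetical 240fps HFR session
     *     requestSettings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fpsRange, 2);
     */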
11860 mBatchSize = 0;
11861 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11862 fps_range.min_fps = fps_range.video_max_fps;
11863 fps_range.video_min_fps = fps_range.video_max_fps;
11864 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11865 fps_range.max_fps);
11866 if (NAME_NOT_FOUND != val) {
11867 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11869 return BAD_VALUE;
11870 }
11871
11872 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11873 /* If batchmode is currently in progress and the fps changes,
11874 * set the flag to restart the sensor */
11875 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11876 (mHFRVideoFps != fps_range.max_fps)) {
11877 mNeedSensorRestart = true;
11878 }
11879 mHFRVideoFps = fps_range.max_fps;
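                    // Batch size follows from the HFR rate and the preview rate target
                    // (PREVIEW_FPS_FOR_HFR): for example, a 240fps request with a 60fps
                    // preview target gives a batch of 4, capped at MAX_HFR_BATCH_SIZE.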
11880 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11881 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11882 mBatchSize = MAX_HFR_BATCH_SIZE;
11883 }
11884 }
11885 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11886
11887 }
11888 } else {
11889 /* HFR mode is session param in backend/ISP. This should be reset when
11890 * in non-HFR mode */
11891 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11892 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11893 return BAD_VALUE;
11894 }
11895 }
11896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11897 return BAD_VALUE;
11898 }
11899 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11900 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11901 return rc;
11902}
11903
11904/*===========================================================================
11905 * FUNCTION : translateToHalMetadata
11906 *
11907 * DESCRIPTION: Read the framework settings from the request's camera_metadata_t
11908 *              and translate them into parm_type_t entries in the HAL metadata buffer.
11909 *
11910 * PARAMETERS :
11911 *   @request          : request sent from framework
11912 *   @hal_metadata     : HAL metadata buffer to populate
11913 *   @snapshotStreamId : stream id of the snapshot stream
11914 * RETURN     : success: NO_ERROR
11915 *              failure: BAD_VALUE
11916 *==========================================================================*/
11917int QCamera3HardwareInterface::translateToHalMetadata
11918 (const camera3_capture_request_t *request,
11919 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011920 uint32_t snapshotStreamId) {
11921 if (request == nullptr || hal_metadata == nullptr) {
11922 return BAD_VALUE;
11923 }
11924
11925 int64_t minFrameDuration = getMinFrameDuration(request);
11926
11927 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11928 minFrameDuration);
11929}
11930
11931int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11932 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11933 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11934
Thierry Strudel3d639192016-09-09 11:52:26 -070011935 int rc = 0;
11936 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011937 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011938
11939 /* Do not change the order of the following list unless you know what you are
11940 * doing.
11941 * The order is laid out in such a way that parameters in the front of the table
11942 * may be used to override the parameters later in the table. Examples are:
11943 * 1. META_MODE should precede AEC/AWB/AF MODE
11944     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11945 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11946     * 4. Any mode should precede its corresponding settings
11947 */
11948 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11949 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11951 rc = BAD_VALUE;
11952 }
11953 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11954 if (rc != NO_ERROR) {
11955 LOGE("extractSceneMode failed");
11956 }
11957 }
11958
11959 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11960 uint8_t fwk_aeMode =
11961 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11962 uint8_t aeMode;
11963 int32_t redeye;
11964
11965 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11966 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011967 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11968 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011969 } else {
11970 aeMode = CAM_AE_MODE_ON;
11971 }
11972 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11973 redeye = 1;
11974 } else {
11975 redeye = 0;
11976 }
11977
11978 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11979 fwk_aeMode);
11980 if (NAME_NOT_FOUND != val) {
11981 int32_t flashMode = (int32_t)val;
11982 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11983 }
11984
11985 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11987 rc = BAD_VALUE;
11988 }
11989 }
11990
11991 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11992 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11993 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11994 fwk_whiteLevel);
11995 if (NAME_NOT_FOUND != val) {
11996 uint8_t whiteLevel = (uint8_t)val;
11997 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11998 rc = BAD_VALUE;
11999 }
12000 }
12001 }
12002
12003 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12004 uint8_t fwk_cacMode =
12005 frame_settings.find(
12006 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12007 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12008 fwk_cacMode);
12009 if (NAME_NOT_FOUND != val) {
12010 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12011 bool entryAvailable = FALSE;
12012 // Check whether Frameworks set CAC mode is supported in device or not
12013 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12014 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12015 entryAvailable = TRUE;
12016 break;
12017 }
12018 }
12019 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12020            // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
12021            //   Only HW ISP CAC + no SW CAC : advertise all 3 modes, with HIGH behaving the same as FAST (by the ISP)
12022            //   No HW ISP CAC + only SW CAC : advertise all 3 modes, with FAST behaving the same as OFF
12023 if (entryAvailable == FALSE) {
12024 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12025 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12026 } else {
12027 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12028                    // HIGH_QUALITY is not supported, so fall back to FAST: the spec says the
12029                    // underlying device implementation may be the same for both modes.
12030 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12031 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12032                    // FAST is not supported, so neither HIGH nor FAST can be used; choose OFF
12033                    // to avoid the fps drop that high-quality processing would cause
12034 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12035 } else {
12036 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12037 }
12038 }
12039 }
12040 LOGD("Final cacMode is %d", cacMode);
12041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12042 rc = BAD_VALUE;
12043 }
12044 } else {
12045 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12046 }
12047 }
12048
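    // Debug override: a non-zero persist.camera.af.infinity property ignores the framework
    // AF mode and forces the focus mode to infinity.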
Thierry Strudel2896d122017-02-23 19:18:03 -080012049 char af_value[PROPERTY_VALUE_MAX];
12050 property_get("persist.camera.af.infinity", af_value, "0");
12051
Jason Lee84ae9972017-02-24 13:24:24 -080012052 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080012053 if (atoi(af_value) == 0) {
12054 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012055 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012056 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12057 fwk_focusMode);
12058 if (NAME_NOT_FOUND != val) {
12059 uint8_t focusMode = (uint8_t)val;
12060 LOGD("set focus mode %d", focusMode);
12061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12062 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12063 rc = BAD_VALUE;
12064 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012065 }
12066 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012067 } else {
12068 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12069 LOGE("Focus forced to infinity %d", focusMode);
12070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12071 rc = BAD_VALUE;
12072 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012073 }
12074
Jason Lee84ae9972017-02-24 13:24:24 -080012075 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12076 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012077 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12079 focalDistance)) {
12080 rc = BAD_VALUE;
12081 }
12082 }
12083
12084 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12085 uint8_t fwk_antibandingMode =
12086 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12087 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12088 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12089 if (NAME_NOT_FOUND != val) {
12090 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012091 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12092 if (m60HzZone) {
12093 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12094 } else {
12095 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12096 }
12097 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12099 hal_antibandingMode)) {
12100 rc = BAD_VALUE;
12101 }
12102 }
12103 }
12104
12105 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12106 int32_t expCompensation = frame_settings.find(
12107 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12108 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12109 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12110 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12111 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012112 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012113 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12114 expCompensation)) {
12115 rc = BAD_VALUE;
12116 }
12117 }
12118
12119 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12120 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12122 rc = BAD_VALUE;
12123 }
12124 }
12125 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12126 rc = setHalFpsRange(frame_settings, hal_metadata);
12127 if (rc != NO_ERROR) {
12128 LOGE("setHalFpsRange failed");
12129 }
12130 }
12131
12132 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12133 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12135 rc = BAD_VALUE;
12136 }
12137 }
12138
12139 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12140 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12141 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12142 fwk_effectMode);
12143 if (NAME_NOT_FOUND != val) {
12144 uint8_t effectMode = (uint8_t)val;
12145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12146 rc = BAD_VALUE;
12147 }
12148 }
12149 }
12150
12151 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12152 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12153 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12154 colorCorrectMode)) {
12155 rc = BAD_VALUE;
12156 }
12157 }
12158
12159 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12160 cam_color_correct_gains_t colorCorrectGains;
12161 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12162 colorCorrectGains.gains[i] =
12163 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12164 }
12165 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12166 colorCorrectGains)) {
12167 rc = BAD_VALUE;
12168 }
12169 }
12170
12171 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12172 cam_color_correct_matrix_t colorCorrectTransform;
12173 cam_rational_type_t transform_elem;
12174 size_t num = 0;
12175 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12176 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12177 transform_elem.numerator =
12178 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12179 transform_elem.denominator =
12180 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12181 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12182 num++;
12183 }
12184 }
12185 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12186 colorCorrectTransform)) {
12187 rc = BAD_VALUE;
12188 }
12189 }
12190
12191 cam_trigger_t aecTrigger;
12192 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12193 aecTrigger.trigger_id = -1;
12194 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12195 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12196 aecTrigger.trigger =
12197 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12198 aecTrigger.trigger_id =
12199 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12200 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12201 aecTrigger)) {
12202 rc = BAD_VALUE;
12203 }
12204 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12205 aecTrigger.trigger, aecTrigger.trigger_id);
12206 }
12207
12208 /*af_trigger must come with a trigger id*/
12209 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12210 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12211 cam_trigger_t af_trigger;
12212 af_trigger.trigger =
12213 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12214 af_trigger.trigger_id =
12215 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12216 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12217 rc = BAD_VALUE;
12218 }
12219 LOGD("AfTrigger: %d AfTriggerID: %d",
12220 af_trigger.trigger, af_trigger.trigger_id);
12221 }
12222
12223 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12224 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12225 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12230 cam_edge_application_t edge_application;
12231 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012232
Thierry Strudel3d639192016-09-09 11:52:26 -070012233 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12234 edge_application.sharpness = 0;
12235 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012236 edge_application.sharpness =
12237 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12238 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12239 int32_t sharpness =
12240 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12241 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12242 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12243 LOGD("Setting edge mode sharpness %d", sharpness);
12244 edge_application.sharpness = sharpness;
12245 }
12246 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012247 }
12248 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12249 rc = BAD_VALUE;
12250 }
12251 }
12252
12253 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12254 int32_t respectFlashMode = 1;
12255 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12256 uint8_t fwk_aeMode =
12257 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012258 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12259 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12260 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012261 respectFlashMode = 0;
12262 LOGH("AE Mode controls flash, ignore android.flash.mode");
12263 }
12264 }
12265 if (respectFlashMode) {
12266 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12267 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12268 LOGH("flash mode after mapping %d", val);
12269 // To check: CAM_INTF_META_FLASH_MODE usage
12270 if (NAME_NOT_FOUND != val) {
12271 uint8_t flashMode = (uint8_t)val;
12272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12273 rc = BAD_VALUE;
12274 }
12275 }
12276 }
12277 }
12278
12279 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12280 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12282 rc = BAD_VALUE;
12283 }
12284 }
12285
12286 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12287 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12289 flashFiringTime)) {
12290 rc = BAD_VALUE;
12291 }
12292 }
12293
12294 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12295 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12296 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12297 hotPixelMode)) {
12298 rc = BAD_VALUE;
12299 }
12300 }
12301
12302 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12303 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12305 lensAperture)) {
12306 rc = BAD_VALUE;
12307 }
12308 }
12309
12310 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12311 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12312 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12313 filterDensity)) {
12314 rc = BAD_VALUE;
12315 }
12316 }
12317
12318 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12319 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12320 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12321 focalLength)) {
12322 rc = BAD_VALUE;
12323 }
12324 }
12325
12326 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12327 uint8_t optStabMode =
12328 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12329 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12330 optStabMode)) {
12331 rc = BAD_VALUE;
12332 }
12333 }
12334
12335 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12336 uint8_t videoStabMode =
12337 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12338 LOGD("videoStabMode from APP = %d", videoStabMode);
12339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12340 videoStabMode)) {
12341 rc = BAD_VALUE;
12342 }
12343 }
12344
12345
12346 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12347 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12348 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12349 noiseRedMode)) {
12350 rc = BAD_VALUE;
12351 }
12352 }
12353
12354 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12355 float reprocessEffectiveExposureFactor =
12356 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12357 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12358 reprocessEffectiveExposureFactor)) {
12359 rc = BAD_VALUE;
12360 }
12361 }
12362
12363 cam_crop_region_t scalerCropRegion;
12364 bool scalerCropSet = false;
12365 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12366 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12367 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12368 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12369 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12370
12371 // Map coordinate system from active array to sensor output.
12372 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12373 scalerCropRegion.width, scalerCropRegion.height);
12374
12375 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12376 scalerCropRegion)) {
12377 rc = BAD_VALUE;
12378 }
12379 scalerCropSet = true;
12380 }
12381
12382 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12383 int64_t sensorExpTime =
12384 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12385 LOGD("setting sensorExpTime %lld", sensorExpTime);
12386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12387 sensorExpTime)) {
12388 rc = BAD_VALUE;
12389 }
12390 }
12391
12392 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12393 int64_t sensorFrameDuration =
12394 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012395 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12396 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12397 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12398 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12399 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12400 sensorFrameDuration)) {
12401 rc = BAD_VALUE;
12402 }
12403 }
12404
12405 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12406 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12407 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12408 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12409 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12410 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12411 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12412 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12413 sensorSensitivity)) {
12414 rc = BAD_VALUE;
12415 }
12416 }
12417
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012418#ifndef USE_HAL_3_3
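    // POST_RAW_SENSITIVITY_BOOST is the additional digital gain applied after RAW readout
    // (100 = no boost); clamp it to the ISP sensitivity range reported by the backend.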
12419 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12420 int32_t ispSensitivity =
12421 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12422 if (ispSensitivity <
12423 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12424 ispSensitivity =
12425 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12426 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12427 }
12428 if (ispSensitivity >
12429 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12430 ispSensitivity =
12431 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12432 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12433 }
12434 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12435 ispSensitivity)) {
12436 rc = BAD_VALUE;
12437 }
12438 }
12439#endif
12440
Thierry Strudel3d639192016-09-09 11:52:26 -070012441 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12442 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12443 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12444 rc = BAD_VALUE;
12445 }
12446 }
12447
12448 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12449 uint8_t fwk_facedetectMode =
12450 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12451
12452 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12453 fwk_facedetectMode);
12454
12455 if (NAME_NOT_FOUND != val) {
12456 uint8_t facedetectMode = (uint8_t)val;
12457 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12458 facedetectMode)) {
12459 rc = BAD_VALUE;
12460 }
12461 }
12462 }
12463
Thierry Strudel54dc9782017-02-15 12:12:10 -080012464 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012465 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012466 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012467 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12468 histogramMode)) {
12469 rc = BAD_VALUE;
12470 }
12471 }
12472
12473 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12474 uint8_t sharpnessMapMode =
12475 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12476 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12477 sharpnessMapMode)) {
12478 rc = BAD_VALUE;
12479 }
12480 }
12481
12482 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12483 uint8_t tonemapMode =
12484 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12485 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12486 rc = BAD_VALUE;
12487 }
12488 }
12489 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12490 /*All tonemap channels will have the same number of points*/
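    /* Each curve is a flattened list of (Pin, Pout) control points, hence the point count
     * is half the metadata entry count; the same count applies to all three channels. */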
12491 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12492 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12493 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12494 cam_rgb_tonemap_curves tonemapCurves;
12495 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12496 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12497 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12498 tonemapCurves.tonemap_points_cnt,
12499 CAM_MAX_TONEMAP_CURVE_SIZE);
12500 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12501 }
12502
12503 /* ch0 = G*/
12504 size_t point = 0;
12505 cam_tonemap_curve_t tonemapCurveGreen;
12506 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12507 for (size_t j = 0; j < 2; j++) {
12508 tonemapCurveGreen.tonemap_points[i][j] =
12509 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12510 point++;
12511 }
12512 }
12513 tonemapCurves.curves[0] = tonemapCurveGreen;
12514
12515 /* ch 1 = B */
12516 point = 0;
12517 cam_tonemap_curve_t tonemapCurveBlue;
12518 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12519 for (size_t j = 0; j < 2; j++) {
12520 tonemapCurveBlue.tonemap_points[i][j] =
12521 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12522 point++;
12523 }
12524 }
12525 tonemapCurves.curves[1] = tonemapCurveBlue;
12526
12527 /* ch 2 = R */
12528 point = 0;
12529 cam_tonemap_curve_t tonemapCurveRed;
12530 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12531 for (size_t j = 0; j < 2; j++) {
12532 tonemapCurveRed.tonemap_points[i][j] =
12533 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12534 point++;
12535 }
12536 }
12537 tonemapCurves.curves[2] = tonemapCurveRed;
12538
12539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12540 tonemapCurves)) {
12541 rc = BAD_VALUE;
12542 }
12543 }
12544
12545 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12546 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12547 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12548 captureIntent)) {
12549 rc = BAD_VALUE;
12550 }
12551 }
12552
12553 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12554 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12555 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12556 blackLevelLock)) {
12557 rc = BAD_VALUE;
12558 }
12559 }
12560
12561 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12562 uint8_t lensShadingMapMode =
12563 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12565 lensShadingMapMode)) {
12566 rc = BAD_VALUE;
12567 }
12568 }
12569
12570 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12571 cam_area_t roi;
12572 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012573 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012574
12575 // Map coordinate system from active array to sensor output.
12576 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12577 roi.rect.height);
12578
12579 if (scalerCropSet) {
12580 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12581 }
12582 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12583 rc = BAD_VALUE;
12584 }
12585 }
12586
12587 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12588 cam_area_t roi;
12589 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012590 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012591
12592 // Map coordinate system from active array to sensor output.
12593 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12594 roi.rect.height);
12595
12596 if (scalerCropSet) {
12597 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12598 }
12599 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12600 rc = BAD_VALUE;
12601 }
12602 }
12603
12604 // CDS for non-HFR non-video mode
12605 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12606 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12607 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12608 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12609 LOGE("Invalid CDS mode %d!", *fwk_cds);
12610 } else {
12611 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12612 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12613 rc = BAD_VALUE;
12614 }
12615 }
12616 }
12617
Thierry Strudel04e026f2016-10-10 11:27:36 -070012618 // Video HDR
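    // Per-request video HDR comes from the vendor tag; if video HDR was enabled at stream
    // configuration time (m_bVideoHdrEnabled), it overrides the request and forces HDR on.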
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012619 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012620 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012621 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12622 }
12623 if (m_bVideoHdrEnabled)
12624 vhdr = CAM_VIDEO_HDR_MODE_ON;
12625
Thierry Strudel54dc9782017-02-15 12:12:10 -080012626 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12627
12628 if(vhdr != curr_hdr_state)
12629 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12630
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012631 rc = setVideoHdrMode(mParameters, vhdr);
12632 if (rc != NO_ERROR) {
12633        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012634 }
12635
12636 //IR
12637 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12638 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12639 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012640 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12641 uint8_t isIRon = 0;
12642
12643        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012644 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12645 LOGE("Invalid IR mode %d!", fwk_ir);
12646 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012647 if(isIRon != curr_ir_state )
12648 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12649
Thierry Strudel04e026f2016-10-10 11:27:36 -070012650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12651 CAM_INTF_META_IR_MODE, fwk_ir)) {
12652 rc = BAD_VALUE;
12653 }
12654 }
12655 }
12656
Thierry Strudel54dc9782017-02-15 12:12:10 -080012657 //Binning Correction Mode
12658 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12659 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12660 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12661 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12662 || (0 > fwk_binning_correction)) {
12663 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12664 } else {
12665 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12666 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12667 rc = BAD_VALUE;
12668 }
12669 }
12670 }
12671
Thierry Strudel269c81a2016-10-12 12:13:59 -070012672 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12673 float aec_speed;
12674 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12675 LOGD("AEC Speed :%f", aec_speed);
12676 if ( aec_speed < 0 ) {
12677            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12678 } else {
12679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12680 aec_speed)) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684 }
12685
12686 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12687 float awb_speed;
12688 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12689 LOGD("AWB Speed :%f", awb_speed);
12690 if ( awb_speed < 0 ) {
12691            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12692 } else {
12693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12694 awb_speed)) {
12695 rc = BAD_VALUE;
12696 }
12697 }
12698 }
12699
Thierry Strudel3d639192016-09-09 11:52:26 -070012700 // TNR
12701 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12702 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12703 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012704 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012705 cam_denoise_param_t tnr;
12706 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12707 tnr.process_plates =
12708 (cam_denoise_process_type_t)frame_settings.find(
12709 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12710 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012711
12712 if(b_TnrRequested != curr_tnr_state)
12713 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12714
Thierry Strudel3d639192016-09-09 11:52:26 -070012715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12716 rc = BAD_VALUE;
12717 }
12718 }
12719
Thierry Strudel54dc9782017-02-15 12:12:10 -080012720 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012721 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012722 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12724 *exposure_metering_mode)) {
12725 rc = BAD_VALUE;
12726 }
12727 }
12728
Thierry Strudel3d639192016-09-09 11:52:26 -070012729 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12730 int32_t fwk_testPatternMode =
12731 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12732 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12733 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12734
12735 if (NAME_NOT_FOUND != testPatternMode) {
12736 cam_test_pattern_data_t testPatternData;
12737 memset(&testPatternData, 0, sizeof(testPatternData));
12738 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12739 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12740 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12741 int32_t *fwk_testPatternData =
12742 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12743 testPatternData.r = fwk_testPatternData[0];
12744 testPatternData.b = fwk_testPatternData[3];
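                // Framework solid-color data is ordered [R, Geven, Godd, B]; map the two green
                // samples onto Gr/Gb according to the sensor's color filter arrangement.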
12745 switch (gCamCapability[mCameraId]->color_arrangement) {
12746 case CAM_FILTER_ARRANGEMENT_RGGB:
12747 case CAM_FILTER_ARRANGEMENT_GRBG:
12748 testPatternData.gr = fwk_testPatternData[1];
12749 testPatternData.gb = fwk_testPatternData[2];
12750 break;
12751 case CAM_FILTER_ARRANGEMENT_GBRG:
12752 case CAM_FILTER_ARRANGEMENT_BGGR:
12753 testPatternData.gr = fwk_testPatternData[2];
12754 testPatternData.gb = fwk_testPatternData[1];
12755 break;
12756 default:
12757 LOGE("color arrangement %d is not supported",
12758 gCamCapability[mCameraId]->color_arrangement);
12759 break;
12760 }
12761 }
12762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12763 testPatternData)) {
12764 rc = BAD_VALUE;
12765 }
12766 } else {
12767 LOGE("Invalid framework sensor test pattern mode %d",
12768 fwk_testPatternMode);
12769 }
12770 }
12771
12772 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12773 size_t count = 0;
12774 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12775 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12776 gps_coords.data.d, gps_coords.count, count);
12777 if (gps_coords.count != count) {
12778 rc = BAD_VALUE;
12779 }
12780 }
12781
12782 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12783 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12784 size_t count = 0;
12785 const char *gps_methods_src = (const char *)
12786 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12787 memset(gps_methods, '\0', sizeof(gps_methods));
12788 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12789 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12790 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12791 if (GPS_PROCESSING_METHOD_SIZE != count) {
12792 rc = BAD_VALUE;
12793 }
12794 }
12795
12796 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12797 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12799 gps_timestamp)) {
12800 rc = BAD_VALUE;
12801 }
12802 }
12803
12804 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12805 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12806 cam_rotation_info_t rotation_info;
12807 if (orientation == 0) {
12808 rotation_info.rotation = ROTATE_0;
12809 } else if (orientation == 90) {
12810 rotation_info.rotation = ROTATE_90;
12811 } else if (orientation == 180) {
12812 rotation_info.rotation = ROTATE_180;
12813 } else if (orientation == 270) {
12814 rotation_info.rotation = ROTATE_270;
12815 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012816 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012817 rotation_info.streamId = snapshotStreamId;
12818 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12819 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12820 rc = BAD_VALUE;
12821 }
12822 }
12823
12824 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12825 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12826 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12827 rc = BAD_VALUE;
12828 }
12829 }
12830
12831 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12832 uint32_t thumb_quality = (uint32_t)
12833 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12834 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12835 thumb_quality)) {
12836 rc = BAD_VALUE;
12837 }
12838 }
12839
12840 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12841 cam_dimension_t dim;
12842 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12843 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12844 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12845 rc = BAD_VALUE;
12846 }
12847 }
12848
12849 // Internal metadata
12850 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12851 size_t count = 0;
12852 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12853 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12854 privatedata.data.i32, privatedata.count, count);
12855 if (privatedata.count != count) {
12856 rc = BAD_VALUE;
12857 }
12858 }
12859
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012860 // ISO/Exposure Priority
12861 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12862 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12863 cam_priority_mode_t mode =
12864 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12865 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12866 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12867 use_iso_exp_pty.previewOnly = FALSE;
12868 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12869 use_iso_exp_pty.value = *ptr;
12870
12871 if(CAM_ISO_PRIORITY == mode) {
12872 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12873 use_iso_exp_pty)) {
12874 rc = BAD_VALUE;
12875 }
12876 }
12877 else {
12878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12879 use_iso_exp_pty)) {
12880 rc = BAD_VALUE;
12881 }
12882 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012883
12884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12885 rc = BAD_VALUE;
12886 }
12887 }
12888 } else {
12889 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12890 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012891 }
12892 }
12893
12894 // Saturation
12895 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12896 int32_t* use_saturation =
12897 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12898 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12899 rc = BAD_VALUE;
12900 }
12901 }
12902
Thierry Strudel3d639192016-09-09 11:52:26 -070012903 // EV step
12904 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12905 gCamCapability[mCameraId]->exp_compensation_step)) {
12906 rc = BAD_VALUE;
12907 }
12908
12909 // CDS info
12910 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12911 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12912 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12913
12914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12915 CAM_INTF_META_CDS_DATA, *cdsData)) {
12916 rc = BAD_VALUE;
12917 }
12918 }
12919
Shuzhen Wang19463d72016-03-08 11:09:52 -080012920 // Hybrid AE
12921 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12922 uint8_t *hybrid_ae = (uint8_t *)
12923 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12924
12925 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12926 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12927 rc = BAD_VALUE;
12928 }
12929 }
12930
Shuzhen Wang14415f52016-11-16 18:26:18 -080012931 // Histogram
12932 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12933 uint8_t histogramMode =
12934 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12936 histogramMode)) {
12937 rc = BAD_VALUE;
12938 }
12939 }
12940
12941 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12942 int32_t histogramBins =
12943 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12944 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12945 histogramBins)) {
12946 rc = BAD_VALUE;
12947 }
12948 }
12949
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012950 // Tracking AF
12951 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12952 uint8_t trackingAfTrigger =
12953 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12955 trackingAfTrigger)) {
12956 rc = BAD_VALUE;
12957 }
12958 }
12959
Thierry Strudel3d639192016-09-09 11:52:26 -070012960 return rc;
12961}
12962
12963/*===========================================================================
12964 * FUNCTION : captureResultCb
12965 *
12966 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12967 *
12968 * PARAMETERS :
12969 * @frame : frame information from mm-camera-interface
12970 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12971 * @userdata: userdata
12972 *
12973 * RETURN : NONE
12974 *==========================================================================*/
12975void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12976 camera3_stream_buffer_t *buffer,
12977 uint32_t frame_number, bool isInputBuffer, void *userdata)
12978{
12979 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12980 if (hw == NULL) {
12981 LOGE("Invalid hw %p", hw);
12982 return;
12983 }
12984
12985 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12986 return;
12987}
12988
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012989/*===========================================================================
12990 * FUNCTION : setBufferErrorStatus
12991 *
12992 * DESCRIPTION: Callback handler for channels to report any buffer errors
12993 *
12994 * PARAMETERS :
12995 * @ch : Channel on which buffer error is reported from
12996 * @frame_number : frame number on which buffer error is reported on
12997 * @buffer_status : buffer error status
12998 * @userdata: userdata
12999 *
13000 * RETURN : NONE
13001 *==========================================================================*/
13002void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13003 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13004{
13005 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13006 if (hw == NULL) {
13007 LOGE("Invalid hw %p", hw);
13008 return;
13009 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013010
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013011 hw->setBufferErrorStatus(ch, frame_number, err);
13012 return;
13013}
13014
13015void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13016 uint32_t frameNumber, camera3_buffer_status_t err)
13017{
13018 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13019 pthread_mutex_lock(&mMutex);
13020
13021 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13022 if (req.frame_number != frameNumber)
13023 continue;
13024 for (auto& k : req.mPendingBufferList) {
13025 if(k.stream->priv == ch) {
13026 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13027 }
13028 }
13029 }
13030
13031 pthread_mutex_unlock(&mMutex);
13032 return;
13033}
Thierry Strudel3d639192016-09-09 11:52:26 -070013034/*===========================================================================
13035 * FUNCTION : initialize
13036 *
13037 * DESCRIPTION: Pass framework callback pointers to HAL
13038 *
13039 * PARAMETERS :
13040 *
13041 *
13042 * RETURN : Success : 0
13043 * Failure: -ENODEV
13044 *==========================================================================*/
13045
13046int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13047 const camera3_callback_ops_t *callback_ops)
13048{
13049 LOGD("E");
13050 QCamera3HardwareInterface *hw =
13051 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13052 if (!hw) {
13053 LOGE("NULL camera device");
13054 return -ENODEV;
13055 }
13056
13057 int rc = hw->initialize(callback_ops);
13058 LOGD("X");
13059 return rc;
13060}
13061
13062/*===========================================================================
13063 * FUNCTION : configure_streams
13064 *
13065 * DESCRIPTION: Validate the device handle and configure the requested streams
13066 *
13067 * PARAMETERS :
13068 *
13069 *
13070 * RETURN : Success: 0
13071 * Failure: -EINVAL (if stream configuration is invalid)
13072 * -ENODEV (fatal error)
13073 *==========================================================================*/
13074
13075int QCamera3HardwareInterface::configure_streams(
13076 const struct camera3_device *device,
13077 camera3_stream_configuration_t *stream_list)
13078{
13079 LOGD("E");
13080 QCamera3HardwareInterface *hw =
13081 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13082 if (!hw) {
13083 LOGE("NULL camera device");
13084 return -ENODEV;
13085 }
13086 int rc = hw->configureStreams(stream_list);
13087 LOGD("X");
13088 return rc;
13089}
13090
13091/*===========================================================================
13092 * FUNCTION : construct_default_request_settings
13093 *
13094 * DESCRIPTION: Configure a settings buffer to meet the required use case
13095 *
13096 * PARAMETERS :
13097 *
13098 *
13099 * RETURN : Success: Return valid metadata
13100 * Failure: Return NULL
13101 *==========================================================================*/
13102const camera_metadata_t* QCamera3HardwareInterface::
13103 construct_default_request_settings(const struct camera3_device *device,
13104 int type)
13105{
13106
13107 LOGD("E");
13108 camera_metadata_t* fwk_metadata = NULL;
13109 QCamera3HardwareInterface *hw =
13110 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13111 if (!hw) {
13112 LOGE("NULL camera device");
13113 return NULL;
13114 }
13115
13116 fwk_metadata = hw->translateCapabilityToMetadata(type);
13117
13118 LOGD("X");
13119 return fwk_metadata;
13120}
13121
13122/*===========================================================================
13123 * FUNCTION : process_capture_request
13124 *
13125 * DESCRIPTION: Validate the device handle and submit the capture request for processing
13126 *
13127 * PARAMETERS :
13128 *
13129 *
13130 * RETURN :
13131 *==========================================================================*/
13132int QCamera3HardwareInterface::process_capture_request(
13133 const struct camera3_device *device,
13134 camera3_capture_request_t *request)
13135{
13136 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013137 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013138 QCamera3HardwareInterface *hw =
13139 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13140 if (!hw) {
13141 LOGE("NULL camera device");
13142 return -EINVAL;
13143 }
13144
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013145 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013146 LOGD("X");
13147 return rc;
13148}
13149
13150/*===========================================================================
13151 * FUNCTION : dump
13152 *
13153 * DESCRIPTION: Dump HAL state for the camera device into the given file descriptor
13154 *
13155 * PARAMETERS :
13156 *
13157 *
13158 * RETURN :
13159 *==========================================================================*/
13160
13161void QCamera3HardwareInterface::dump(
13162 const struct camera3_device *device, int fd)
13163{
13164 /* Log level property is read when "adb shell dumpsys media.camera" is
13165 called so that the log level can be controlled without restarting
13166 the media server */
13167 getLogLevel();
13168
13169 LOGD("E");
13170 QCamera3HardwareInterface *hw =
13171 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13172 if (!hw) {
13173 LOGE("NULL camera device");
13174 return;
13175 }
13176
13177 hw->dump(fd);
13178 LOGD("X");
13179 return;
13180}
13181
13182/*===========================================================================
13183 * FUNCTION : flush
13184 *
13185 * DESCRIPTION: Flush all in-flight captures on the camera device
13186 *
13187 * PARAMETERS :
13188 *
13189 *
13190 * RETURN :
13191 *==========================================================================*/
13192
13193int QCamera3HardwareInterface::flush(
13194 const struct camera3_device *device)
13195{
13196 int rc;
13197 LOGD("E");
13198 QCamera3HardwareInterface *hw =
13199 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13200 if (!hw) {
13201 LOGE("NULL camera device");
13202 return -EINVAL;
13203 }
13204
13205 pthread_mutex_lock(&hw->mMutex);
13206 // Validate current state
13207 switch (hw->mState) {
13208 case STARTED:
13209 /* valid state */
13210 break;
13211
13212 case ERROR:
13213 pthread_mutex_unlock(&hw->mMutex);
13214 hw->handleCameraDeviceError();
13215 return -ENODEV;
13216
13217 default:
13218 LOGI("Flush returned during state %d", hw->mState);
13219 pthread_mutex_unlock(&hw->mMutex);
13220 return 0;
13221 }
13222 pthread_mutex_unlock(&hw->mMutex);
13223
13224 rc = hw->flush(true /* restart channels */ );
13225 LOGD("X");
13226 return rc;
13227}
13228
13229/*===========================================================================
13230 * FUNCTION : close_camera_device
13231 *
13232 * DESCRIPTION: Close the camera device and release the HAL instance
13233 *
13234 * PARAMETERS :
13235 *
13236 *
13237 * RETURN :
13238 *==========================================================================*/
13239int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13240{
13241 int ret = NO_ERROR;
13242 QCamera3HardwareInterface *hw =
13243 reinterpret_cast<QCamera3HardwareInterface *>(
13244 reinterpret_cast<camera3_device_t *>(device)->priv);
13245 if (!hw) {
13246 LOGE("NULL camera device");
13247 return BAD_VALUE;
13248 }
13249
13250 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13251 delete hw;
13252 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013253 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013254 return ret;
13255}
13256
13257/*===========================================================================
13258 * FUNCTION : getWaveletDenoiseProcessPlate
13259 *
13260 * DESCRIPTION: query wavelet denoise process plate
13261 *
13262 * PARAMETERS : None
13263 *
13264 * RETURN : WNR process plate value
13265 *==========================================================================*/
13266cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13267{
13268 char prop[PROPERTY_VALUE_MAX];
13269 memset(prop, 0, sizeof(prop));
13270 property_get("persist.denoise.process.plates", prop, "0");
13271 int processPlate = atoi(prop);
13272 switch(processPlate) {
13273 case 0:
13274 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13275 case 1:
13276 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13277 case 2:
13278 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13279 case 3:
13280 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13281 default:
13282 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13283 }
13284}
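// A minimal usage sketch (assumption: a debuggable build with adb access) for
// selecting the WNR plate via the property read above; the integer maps onto
// the switch cases (0 = YCbCr plane, 1 = CbCr only, 2 = streamlined YCbCr,
// 3 = streamlined CbCr, anything else falls back to streamlined YCbCr):
//
//   adb shell setprop persist.denoise.process.plates 2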
13285
13286
13287/*===========================================================================
13288 * FUNCTION : getTemporalDenoiseProcessPlate
13289 *
13290 * DESCRIPTION: query temporal denoise process plate
13291 *
13292 * PARAMETERS : None
13293 *
13294 * RETURN : TNR process plate value
13295 *==========================================================================*/
13296cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13297{
13298 char prop[PROPERTY_VALUE_MAX];
13299 memset(prop, 0, sizeof(prop));
13300 property_get("persist.tnr.process.plates", prop, "0");
13301 int processPlate = atoi(prop);
13302 switch(processPlate) {
13303 case 0:
13304 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13305 case 1:
13306 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13307 case 2:
13308 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13309 case 3:
13310 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13311 default:
13312 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13313 }
13314}
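// The TNR plate follows the same value convention but is driven by a separate
// property (same adb-access assumption as the WNR sketch above):
//
//   adb shell setprop persist.tnr.process.plates 2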
13315
13316
13317/*===========================================================================
13318 * FUNCTION : extractSceneMode
13319 *
13320 * DESCRIPTION: Extract scene mode from frameworks set metadata
13321 *
13322 * PARAMETERS :
13323 * @frame_settings: CameraMetadata reference
13324 * @metaMode: ANDROID_CONTROL_MODE
13325 * @hal_metadata: hal metadata structure
13326 *
13327 * RETURN : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
13328 *==========================================================================*/
13329int32_t QCamera3HardwareInterface::extractSceneMode(
13330 const CameraMetadata &frame_settings, uint8_t metaMode,
13331 metadata_buffer_t *hal_metadata)
13332{
13333 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013334 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13335
13336 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13337 LOGD("Ignoring control mode OFF_KEEP_STATE");
13338 return NO_ERROR;
13339 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013340
13341 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13342 camera_metadata_ro_entry entry =
13343 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13344 if (0 == entry.count)
13345 return rc;
13346
13347 uint8_t fwk_sceneMode = entry.data.u8[0];
13348
13349 int val = lookupHalName(SCENE_MODES_MAP,
13350 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13351 fwk_sceneMode);
13352 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013353 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013354 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013355 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013356 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013357
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013358 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13359 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13360 }
13361
13362 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13363        // sceneMode has already been mapped to the HAL enum above, so compare
        // against CAM_SCENE_MODE_HDR rather than the framework constant.
        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013364 cam_hdr_param_t hdr_params;
13365 hdr_params.hdr_enable = 1;
13366 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13367 hdr_params.hdr_need_1x = false;
13368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13369 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13370 rc = BAD_VALUE;
13371 }
13372 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013373
Thierry Strudel3d639192016-09-09 11:52:26 -070013374 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13375 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13376 rc = BAD_VALUE;
13377 }
13378 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013379
13380 if (mForceHdrSnapshot) {
13381 cam_hdr_param_t hdr_params;
13382 hdr_params.hdr_enable = 1;
13383 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13384 hdr_params.hdr_need_1x = false;
13385 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13386 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13387 rc = BAD_VALUE;
13388 }
13389 }
13390
Thierry Strudel3d639192016-09-09 11:52:26 -070013391 return rc;
13392}
13393
13394/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013395 * FUNCTION : setVideoHdrMode
13396 *
13397 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13398 *
13399 * PARAMETERS :
13400 * @hal_metadata: hal metadata structure
13401 * @vhdr: requested QCAMERA3_VIDEO_HDR_MODE value
 *
 * RETURN : NO_ERROR on success
 *              BAD_VALUE on an invalid video HDR mode
13404 *==========================================================================*/
13405int32_t QCamera3HardwareInterface::setVideoHdrMode(
13406 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13407{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013408 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13409 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13410 }
13411
13412 LOGE("Invalid Video HDR mode %d!", vhdr);
13413 return BAD_VALUE;
13414}
13415
13416/*===========================================================================
13417 * FUNCTION : setSensorHDR
13418 *
13419 * DESCRIPTION: Enable/disable sensor HDR.
13420 *
13421 * PARAMETERS :
13422 * @hal_metadata: hal metadata structure
13423 * @enable: boolean whether to enable/disable sensor HDR
 * @isVideoHdrEnable: true when called for video HDR; the cached
 *                    m_bSensorHDREnabled state is then left untouched
 *
 * RETURN : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
13426 *==========================================================================*/
13427int32_t QCamera3HardwareInterface::setSensorHDR(
13428 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13429{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013430 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013431 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13432
13433 if (enable) {
13434 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13435 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13436 #ifdef _LE_CAMERA_
13437 //Default to staggered HDR for IOT
13438 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13439 #else
13440 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13441 #endif
13442 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13443 }
13444
13445 bool isSupported = false;
13446 switch (sensor_hdr) {
13447 case CAM_SENSOR_HDR_IN_SENSOR:
13448 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13449 CAM_QCOM_FEATURE_SENSOR_HDR) {
13450 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013451 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013452 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013453 break;
13454 case CAM_SENSOR_HDR_ZIGZAG:
13455 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13456 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13457 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013458 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013459 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013460 break;
13461 case CAM_SENSOR_HDR_STAGGERED:
13462 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13463 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13464 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013465 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013466 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013467 break;
13468 case CAM_SENSOR_HDR_OFF:
13469 isSupported = true;
13470 LOGD("Turning off sensor HDR");
13471 break;
13472 default:
13473 LOGE("HDR mode %d not supported", sensor_hdr);
13474 rc = BAD_VALUE;
13475 break;
13476 }
13477
13478 if(isSupported) {
13479 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13480 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13481 rc = BAD_VALUE;
13482 } else {
13483 if(!isVideoHdrEnable)
13484 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013485 }
13486 }
13487 return rc;
13488}
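// A minimal sketch for forcing a sensor HDR type from the shell (assumption:
// debuggable build with adb access). The integer is cast directly to
// cam_sensor_hdr_type_t above and is honored only when the matching
// qcom_supported_feature_mask bit is set; per the _LE_CAMERA_ default comment,
// 3 selects staggered HDR:
//
//   adb shell setprop persist.camera.sensor.hdr 3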
13489
13490/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013491 * FUNCTION : needRotationReprocess
13492 *
13493 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13494 *
13495 * PARAMETERS : none
13496 *
13497 * RETURN : true: needed
13498 * false: no need
13499 *==========================================================================*/
13500bool QCamera3HardwareInterface::needRotationReprocess()
13501{
13502 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13503        // pp has the capability to process rotation, so rotation is routed through reprocess
13504 LOGH("need do reprocess for rotation");
13505 return true;
13506 }
13507
13508 return false;
13509}
13510
13511/*===========================================================================
13512 * FUNCTION : needReprocess
13513 *
13514 * DESCRIPTION: if reprocess in needed
13515 *
13516 * PARAMETERS : none
13517 *
13518 * RETURN : true: needed
13519 * false: no need
13520 *==========================================================================*/
13521bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13522{
13523 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13524 // TODO: add for ZSL HDR later
13525 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13526 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13527 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13528 return true;
13529 } else {
13530 LOGH("already post processed frame");
13531 return false;
13532 }
13533 }
13534 return needRotationReprocess();
13535}
13536
13537/*===========================================================================
13538 * FUNCTION : needJpegExifRotation
13539 *
13540 * DESCRIPTION: if rotation from jpeg is needed
13541 *
13542 * PARAMETERS : none
13543 *
13544 * RETURN : true: needed
13545 * false: no need
13546 *==========================================================================*/
13547bool QCamera3HardwareInterface::needJpegExifRotation()
13548{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013549 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013550 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13551 LOGD("Need use Jpeg EXIF Rotation");
13552 return true;
13553 }
13554 return false;
13555}
13556
13557/*===========================================================================
13558 * FUNCTION : addOfflineReprocChannel
13559 *
13560 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13561 * coming from input channel
13562 *
13563 * PARAMETERS :
13564 * @config : reprocess configuration
13565 * @inputChHandle : pointer to the input (source) channel
13566 *
13567 *
13568 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13569 *==========================================================================*/
13570QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13571 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13572{
13573 int32_t rc = NO_ERROR;
13574 QCamera3ReprocessChannel *pChannel = NULL;
13575
13576 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013577 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13578 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013579 if (NULL == pChannel) {
13580 LOGE("no mem for reprocess channel");
13581 return NULL;
13582 }
13583
13584 rc = pChannel->initialize(IS_TYPE_NONE);
13585 if (rc != NO_ERROR) {
13586 LOGE("init reprocess channel failed, ret = %d", rc);
13587 delete pChannel;
13588 return NULL;
13589 }
13590
13591 // pp feature config
13592 cam_pp_feature_config_t pp_config;
13593 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13594
13595 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13596 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13597 & CAM_QCOM_FEATURE_DSDN) {
13598        //Use CPP CDS in case h/w supports it.
13599 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13600 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13601 }
13602 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13603 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13604 }
13605
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013606 if (config.hdr_param.hdr_enable) {
13607 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13608 pp_config.hdr_param = config.hdr_param;
13609 }
13610
13611 if (mForceHdrSnapshot) {
13612 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13613 pp_config.hdr_param.hdr_enable = 1;
13614 pp_config.hdr_param.hdr_need_1x = 0;
13615 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13616 }
13617
Thierry Strudel3d639192016-09-09 11:52:26 -070013618 rc = pChannel->addReprocStreamsFromSource(pp_config,
13619 config,
13620 IS_TYPE_NONE,
13621 mMetadataChannel);
13622
13623 if (rc != NO_ERROR) {
13624 delete pChannel;
13625 return NULL;
13626 }
13627 return pChannel;
13628}
13629
13630/*===========================================================================
13631 * FUNCTION : getMobicatMask
13632 *
13633 * DESCRIPTION: returns mobicat mask
13634 *
13635 * PARAMETERS : none
13636 *
13637 * RETURN : mobicat mask
13638 *
13639 *==========================================================================*/
13640uint8_t QCamera3HardwareInterface::getMobicatMask()
13641{
13642 return m_MobicatMask;
13643}
13644
13645/*===========================================================================
13646 * FUNCTION : setMobicat
13647 *
13648 * DESCRIPTION: set Mobicat on/off.
13649 *
13650 * PARAMETERS :
13651 * @params : none
13652 *
13653 * RETURN : int32_t type of status
13654 * NO_ERROR -- success
13655 * none-zero failure code
13656 *==========================================================================*/
13657int32_t QCamera3HardwareInterface::setMobicat()
13658{
13659 char value [PROPERTY_VALUE_MAX];
13660 property_get("persist.camera.mobicat", value, "0");
13661 int32_t ret = NO_ERROR;
13662 uint8_t enableMobi = (uint8_t)atoi(value);
13663
13664 if (enableMobi) {
13665 tune_cmd_t tune_cmd;
13666 tune_cmd.type = SET_RELOAD_CHROMATIX;
13667 tune_cmd.module = MODULE_ALL;
13668 tune_cmd.value = TRUE;
13669 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13670 CAM_INTF_PARM_SET_VFE_COMMAND,
13671 tune_cmd);
13672
13673 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13674 CAM_INTF_PARM_SET_PP_COMMAND,
13675 tune_cmd);
13676 }
13677 m_MobicatMask = enableMobi;
13678
13679 return ret;
13680}
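// A minimal sketch for enabling Mobicat (chromatix reload commands to VFE and
// PP) at runtime (assumption: debuggable build with adb access); any non-zero
// value enables it:
//
//   adb shell setprop persist.camera.mobicat 1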
13681
13682/*===========================================================================
13683* FUNCTION : getLogLevel
13684*
13685* DESCRIPTION: Reads the log level property into a variable
13686*
13687* PARAMETERS :
13688* None
13689*
13690* RETURN :
13691* None
13692*==========================================================================*/
13693void QCamera3HardwareInterface::getLogLevel()
13694{
13695 char prop[PROPERTY_VALUE_MAX];
13696 uint32_t globalLogLevel = 0;
13697
13698 property_get("persist.camera.hal.debug", prop, "0");
13699 int val = atoi(prop);
13700 if (0 <= val) {
13701 gCamHal3LogLevel = (uint32_t)val;
13702 }
13703
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013704 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013705 gKpiDebugLevel = atoi(prop);
13706
13707 property_get("persist.camera.global.debug", prop, "0");
13708 val = atoi(prop);
13709 if (0 <= val) {
13710 globalLogLevel = (uint32_t)val;
13711 }
13712
13713    /* The higher of persist.camera.hal.debug and persist.camera.global.debug is selected */
13714 if (gCamHal3LogLevel < globalLogLevel)
13715 gCamHal3LogLevel = globalLogLevel;
13716
13717 return;
13718}
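// A minimal sketch for raising the HAL log level without restarting the media
// server (assumption: debuggable build with adb access). The properties are
// re-read by dump(), so triggering a dump applies the new level:
//
//   adb shell setprop persist.camera.hal.debug 4
//   adb shell setprop persist.camera.global.debug 4
//   adb shell dumpsys media.camera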
13719
13720/*===========================================================================
13721 * FUNCTION : validateStreamRotations
13722 *
13723 * DESCRIPTION: Check if the rotations requested are supported
13724 *
13725 * PARAMETERS :
13726 * @stream_list : streams to be configured
13727 *
13728 * RETURN : NO_ERROR on success
13729 * -EINVAL on failure
13730 *
13731 *==========================================================================*/
13732int QCamera3HardwareInterface::validateStreamRotations(
13733 camera3_stream_configuration_t *streamList)
13734{
13735 int rc = NO_ERROR;
13736
13737 /*
13738 * Loop through all streams requested in configuration
13739 * Check if unsupported rotations have been requested on any of them
13740 */
13741 for (size_t j = 0; j < streamList->num_streams; j++){
13742 camera3_stream_t *newStream = streamList->streams[j];
13743
Emilian Peev35ceeed2017-06-29 11:58:56 -070013744 switch(newStream->rotation) {
13745 case CAMERA3_STREAM_ROTATION_0:
13746 case CAMERA3_STREAM_ROTATION_90:
13747 case CAMERA3_STREAM_ROTATION_180:
13748 case CAMERA3_STREAM_ROTATION_270:
13749 //Expected values
13750 break;
13751 default:
13752            ALOGE("%s: Error: Unsupported rotation of %d requested for stream "
                    "type:%d and stream format:%d", __func__,
13754 newStream->rotation, newStream->stream_type,
13755 newStream->format);
13756 return -EINVAL;
13757 }
13758
Thierry Strudel3d639192016-09-09 11:52:26 -070013759 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13760 bool isImplDef = (newStream->format ==
13761 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13762 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13763 isImplDef);
13764
13765 if (isRotated && (!isImplDef || isZsl)) {
13766            LOGE("Error: Unsupported rotation of %d requested for stream "
                    "type:%d and stream format:%d",
13768 newStream->rotation, newStream->stream_type,
13769 newStream->format);
13770 rc = -EINVAL;
13771 break;
13772 }
13773 }
13774
13775 return rc;
13776}
13777
13778/*===========================================================================
13779* FUNCTION : getFlashInfo
13780*
13781* DESCRIPTION: Retrieve information about whether the device has a flash.
13782*
13783* PARAMETERS :
13784* @cameraId : Camera id to query
13785* @hasFlash : Boolean indicating whether there is a flash device
13786* associated with given camera
13787* @flashNode : If a flash device exists, this will be its device node.
13788*
13789* RETURN :
13790* None
13791*==========================================================================*/
13792void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13793 bool& hasFlash,
13794 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13795{
13796 cam_capability_t* camCapability = gCamCapability[cameraId];
13797 if (NULL == camCapability) {
13798 hasFlash = false;
13799 flashNode[0] = '\0';
13800 } else {
13801 hasFlash = camCapability->flash_available;
13802 strlcpy(flashNode,
13803 (char*)camCapability->flash_dev_name,
13804 QCAMERA_MAX_FILEPATH_LENGTH);
13805 }
13806}
13807
13808/*===========================================================================
13809* FUNCTION : getEepromVersionInfo
13810*
13811* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13812*
13813* PARAMETERS : None
13814*
13815* RETURN : string describing EEPROM version
13816* "\0" if no such info available
13817*==========================================================================*/
13818const char *QCamera3HardwareInterface::getEepromVersionInfo()
13819{
13820 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13821}
13822
13823/*===========================================================================
13824* FUNCTION : getLdafCalib
13825*
13826* DESCRIPTION: Retrieve Laser AF calibration data
13827*
13828* PARAMETERS : None
13829*
13830* RETURN : Two uint32_t describing laser AF calibration data
13831* NULL if none is available.
13832*==========================================================================*/
13833const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13834{
13835 if (mLdafCalibExist) {
13836 return &mLdafCalib[0];
13837 } else {
13838 return NULL;
13839 }
13840}
13841
13842/*===========================================================================
13843 * FUNCTION : dynamicUpdateMetaStreamInfo
13844 *
13845 * DESCRIPTION: This function:
13846 * (1) stops all the channels
13847 * (2) returns error on pending requests and buffers
13848 * (3) sends metastream_info in setparams
13849 * (4) starts all channels
13850 * This is useful when sensor has to be restarted to apply any
13851 * settings such as frame rate from a different sensor mode
13852 *
13853 * PARAMETERS : None
13854 *
13855 * RETURN : NO_ERROR on success
13856 * Error codes on failure
13857 *
13858 *==========================================================================*/
13859int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13860{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013861 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013862 int rc = NO_ERROR;
13863
13864 LOGD("E");
13865
13866 rc = stopAllChannels();
13867 if (rc < 0) {
13868 LOGE("stopAllChannels failed");
13869 return rc;
13870 }
13871
13872 rc = notifyErrorForPendingRequests();
13873 if (rc < 0) {
13874 LOGE("notifyErrorForPendingRequests failed");
13875 return rc;
13876 }
13877
13878 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13879        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx "
                "Format:%d",
13881 mStreamConfigInfo.type[i],
13882 mStreamConfigInfo.stream_sizes[i].width,
13883 mStreamConfigInfo.stream_sizes[i].height,
13884 mStreamConfigInfo.postprocess_mask[i],
13885 mStreamConfigInfo.format[i]);
13886 }
13887
13888 /* Send meta stream info once again so that ISP can start */
13889 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13890 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13891 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13892 mParameters);
13893 if (rc < 0) {
13894 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13895 }
13896
13897 rc = startAllChannels();
13898 if (rc < 0) {
13899 LOGE("startAllChannels failed");
13900 return rc;
13901 }
13902
13903 LOGD("X");
13904 return rc;
13905}
13906
13907/*===========================================================================
13908 * FUNCTION : stopAllChannels
13909 *
13910 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13911 *
13912 * PARAMETERS : None
13913 *
13914 * RETURN : NO_ERROR on success
13915 * Error codes on failure
13916 *
13917 *==========================================================================*/
13918int32_t QCamera3HardwareInterface::stopAllChannels()
13919{
13920 int32_t rc = NO_ERROR;
13921
13922 LOGD("Stopping all channels");
13923 // Stop the Streams/Channels
13924 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13925 it != mStreamInfo.end(); it++) {
13926 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13927 if (channel) {
13928 channel->stop();
13929 }
13930 (*it)->status = INVALID;
13931 }
13932
13933 if (mSupportChannel) {
13934 mSupportChannel->stop();
13935 }
13936 if (mAnalysisChannel) {
13937 mAnalysisChannel->stop();
13938 }
13939 if (mRawDumpChannel) {
13940 mRawDumpChannel->stop();
13941 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013942 if (mHdrPlusRawSrcChannel) {
13943 mHdrPlusRawSrcChannel->stop();
13944 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013945 if (mMetadataChannel) {
13946        /* If mStreamInfo is non-empty, a metadata stream exists */
13947 mMetadataChannel->stop();
13948 }
13949
13950 LOGD("All channels stopped");
13951 return rc;
13952}
13953
13954/*===========================================================================
13955 * FUNCTION : startAllChannels
13956 *
13957 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13958 *
13959 * PARAMETERS : None
13960 *
13961 * RETURN : NO_ERROR on success
13962 * Error codes on failure
13963 *
13964 *==========================================================================*/
13965int32_t QCamera3HardwareInterface::startAllChannels()
13966{
13967 int32_t rc = NO_ERROR;
13968
13969 LOGD("Start all channels ");
13970 // Start the Streams/Channels
13971 if (mMetadataChannel) {
13972        /* If mStreamInfo is non-empty, a metadata stream exists */
13973 rc = mMetadataChannel->start();
13974 if (rc < 0) {
13975 LOGE("META channel start failed");
13976 return rc;
13977 }
13978 }
13979 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13980 it != mStreamInfo.end(); it++) {
13981 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13982 if (channel) {
13983 rc = channel->start();
13984 if (rc < 0) {
13985 LOGE("channel start failed");
13986 return rc;
13987 }
13988 }
13989 }
13990 if (mAnalysisChannel) {
13991 mAnalysisChannel->start();
13992 }
13993 if (mSupportChannel) {
13994 rc = mSupportChannel->start();
13995 if (rc < 0) {
13996 LOGE("Support channel start failed");
13997 return rc;
13998 }
13999 }
14000 if (mRawDumpChannel) {
14001 rc = mRawDumpChannel->start();
14002 if (rc < 0) {
14003 LOGE("RAW dump channel start failed");
14004 return rc;
14005 }
14006 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014007 if (mHdrPlusRawSrcChannel) {
14008 rc = mHdrPlusRawSrcChannel->start();
14009 if (rc < 0) {
14010 LOGE("HDR+ RAW channel start failed");
14011 return rc;
14012 }
14013 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014014
14015 LOGD("All channels started");
14016 return rc;
14017}
14018
14019/*===========================================================================
14020 * FUNCTION : notifyErrorForPendingRequests
14021 *
14022 * DESCRIPTION: This function sends error for all the pending requests/buffers
14023 *
14024 * PARAMETERS : None
14025 *
14026 * RETURN : Error codes
14027 * NO_ERROR on success
14028 *
14029 *==========================================================================*/
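// The loop below does an ordered merge of the pending-request and
// pending-buffer lists and, per frame, picks the error type based on what has
// already been delivered to the framework:
//   - result metadata sent, buffers pending  -> CAMERA3_MSG_ERROR_BUFFER per buffer
//   - buffers sent, result metadata pending  -> CAMERA3_MSG_ERROR_RESULT
//   - neither sent                           -> CAMERA3_MSG_ERROR_REQUEST plus error buffers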
14030int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14031{
Emilian Peev7650c122017-01-19 08:24:33 -080014032 notifyErrorFoPendingDepthData(mDepthChannel);
14033
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014034 auto pendingRequest = mPendingRequestsList.begin();
14035 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014036
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014037 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14038 // buffers (for which buffers aren't sent yet).
14039 while (pendingRequest != mPendingRequestsList.end() ||
14040 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14041 if (pendingRequest == mPendingRequestsList.end() ||
14042 pendingBuffer->frame_number < pendingRequest->frame_number) {
14043            // If metadata for this frame was sent, notify about a buffer error and return buffers
14044 // with error.
14045 for (auto &info : pendingBuffer->mPendingBufferList) {
14046 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014047 camera3_notify_msg_t notify_msg;
14048 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14049 notify_msg.type = CAMERA3_MSG_ERROR;
14050 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014051 notify_msg.message.error.error_stream = info.stream;
14052 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014053 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014054
14055 camera3_stream_buffer_t buffer = {};
14056 buffer.acquire_fence = -1;
14057 buffer.release_fence = -1;
14058 buffer.buffer = info.buffer;
14059 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14060 buffer.stream = info.stream;
14061 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014062 }
14063
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014064 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14065 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14066 pendingBuffer->frame_number > pendingRequest->frame_number) {
14067 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014068 camera3_notify_msg_t notify_msg;
14069 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14070 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014071 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14072 notify_msg.message.error.error_stream = nullptr;
14073 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014074 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014075
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014076 if (pendingRequest->input_buffer != nullptr) {
14077 camera3_capture_result result = {};
14078 result.frame_number = pendingRequest->frame_number;
14079 result.result = nullptr;
14080 result.input_buffer = pendingRequest->input_buffer;
14081 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014082 }
14083
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014084 mShutterDispatcher.clear(pendingRequest->frame_number);
14085 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14086 } else {
14087 // If both buffers and result metadata weren't sent yet, notify about a request error
14088 // and return buffers with error.
14089 for (auto &info : pendingBuffer->mPendingBufferList) {
14090 camera3_notify_msg_t notify_msg;
14091 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14092 notify_msg.type = CAMERA3_MSG_ERROR;
14093 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14094 notify_msg.message.error.error_stream = info.stream;
14095 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14096 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014097
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014098 camera3_stream_buffer_t buffer = {};
14099 buffer.acquire_fence = -1;
14100 buffer.release_fence = -1;
14101 buffer.buffer = info.buffer;
14102 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14103 buffer.stream = info.stream;
14104 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14105 }
14106
14107 if (pendingRequest->input_buffer != nullptr) {
14108 camera3_capture_result result = {};
14109 result.frame_number = pendingRequest->frame_number;
14110 result.result = nullptr;
14111 result.input_buffer = pendingRequest->input_buffer;
14112 orchestrateResult(&result);
14113 }
14114
14115 mShutterDispatcher.clear(pendingRequest->frame_number);
14116 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14117 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014118 }
14119 }
14120
14121 /* Reset pending frame Drop list and requests list */
14122 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014123 mShutterDispatcher.clear();
14124 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014125 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014126 LOGH("Cleared all the pending buffers ");
14127
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014128 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014129}
14130
14131bool QCamera3HardwareInterface::isOnEncoder(
14132 const cam_dimension_t max_viewfinder_size,
14133 uint32_t width, uint32_t height)
14134{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014135 return ((width > (uint32_t)max_viewfinder_size.width) ||
14136 (height > (uint32_t)max_viewfinder_size.height) ||
14137 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14138 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014139}
14140
14141/*===========================================================================
14142 * FUNCTION : setBundleInfo
14143 *
14144 * DESCRIPTION: Set bundle info for all streams that are bundle.
14145 *
14146 * PARAMETERS : None
14147 *
14148 * RETURN : NO_ERROR on success
14149 * Error codes on failure
14150 *==========================================================================*/
14151int32_t QCamera3HardwareInterface::setBundleInfo()
14152{
14153 int32_t rc = NO_ERROR;
14154
14155 if (mChannelHandle) {
14156 cam_bundle_config_t bundleInfo;
14157 memset(&bundleInfo, 0, sizeof(bundleInfo));
14158 rc = mCameraHandle->ops->get_bundle_info(
14159 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14160 if (rc != NO_ERROR) {
14161 LOGE("get_bundle_info failed");
14162 return rc;
14163 }
14164 if (mAnalysisChannel) {
14165 mAnalysisChannel->setBundleInfo(bundleInfo);
14166 }
14167 if (mSupportChannel) {
14168 mSupportChannel->setBundleInfo(bundleInfo);
14169 }
14170 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14171 it != mStreamInfo.end(); it++) {
14172 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14173 channel->setBundleInfo(bundleInfo);
14174 }
14175 if (mRawDumpChannel) {
14176 mRawDumpChannel->setBundleInfo(bundleInfo);
14177 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014178 if (mHdrPlusRawSrcChannel) {
14179 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14180 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014181 }
14182
14183 return rc;
14184}
14185
14186/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014187 * FUNCTION : setInstantAEC
14188 *
14189 * DESCRIPTION: Set Instant AEC related params.
14190 *
14191 * PARAMETERS :
14192 * @meta: CameraMetadata reference
14193 *
14194 * RETURN : NO_ERROR on success
14195 * Error codes on failure
14196 *==========================================================================*/
14197int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14198{
14199 int32_t rc = NO_ERROR;
14200 uint8_t val = 0;
14201 char prop[PROPERTY_VALUE_MAX];
14202
14203 // First try to configure instant AEC from framework metadata
14204 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14205 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14206 }
14207
14208 // If framework did not set this value, try to read from set prop.
14209 if (val == 0) {
14210 memset(prop, 0, sizeof(prop));
14211 property_get("persist.camera.instant.aec", prop, "0");
14212 val = (uint8_t)atoi(prop);
14213 }
14214
14215 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14216 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14217 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14218 mInstantAEC = val;
14219 mInstantAECSettledFrameNumber = 0;
14220 mInstantAecFrameIdxCount = 0;
14221 LOGH("instantAEC value set %d",val);
14222 if (mInstantAEC) {
14223 memset(prop, 0, sizeof(prop));
14224 property_get("persist.camera.ae.instant.bound", prop, "10");
14225 int32_t aec_frame_skip_cnt = atoi(prop);
14226 if (aec_frame_skip_cnt >= 0) {
14227 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14228 } else {
14229 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14230 rc = BAD_VALUE;
14231 }
14232 }
14233 } else {
14234 LOGE("Bad instant aec value set %d", val);
14235 rc = BAD_VALUE;
14236 }
14237 return rc;
14238}
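// A minimal sketch for exercising the property fallback above when the
// framework does not set QCAMERA3_INSTANT_AEC_MODE (assumption: debuggable
// build with adb access; the value 1 is assumed to select a non-default
// convergence mode and must lie in
// [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX)):
//
//   adb shell setprop persist.camera.instant.aec 1
//   adb shell setprop persist.camera.ae.instant.bound 10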
14239
14240/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014241 * FUNCTION : get_num_overall_buffers
14242 *
14243 * DESCRIPTION: Return the total number of pending buffers across all requests.
14244 *
14245 * PARAMETERS : None
14246 *
14247 * RETURN : Number of overall pending buffers
14248 *
14249 *==========================================================================*/
14250uint32_t PendingBuffersMap::get_num_overall_buffers()
14251{
14252 uint32_t sum_buffers = 0;
14253 for (auto &req : mPendingBuffersInRequest) {
14254 sum_buffers += req.mPendingBufferList.size();
14255 }
14256 return sum_buffers;
14257}
14258
14259/*===========================================================================
14260 * FUNCTION : removeBuf
14261 *
14262 * DESCRIPTION: Remove a matching buffer from tracker.
14263 *
14264 * PARAMETERS : @buffer: image buffer for the callback
14265 *
14266 * RETURN : None
14267 *
14268 *==========================================================================*/
14269void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14270{
14271 bool buffer_found = false;
14272 for (auto req = mPendingBuffersInRequest.begin();
14273 req != mPendingBuffersInRequest.end(); req++) {
14274 for (auto k = req->mPendingBufferList.begin();
14275 k != req->mPendingBufferList.end(); k++ ) {
14276 if (k->buffer == buffer) {
14277 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14278 req->frame_number, buffer);
14279 k = req->mPendingBufferList.erase(k);
14280 if (req->mPendingBufferList.empty()) {
14281 // Remove this request from Map
14282 req = mPendingBuffersInRequest.erase(req);
14283 }
14284 buffer_found = true;
14285 break;
14286 }
14287 }
14288 if (buffer_found) {
14289 break;
14290 }
14291 }
14292 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14293 get_num_overall_buffers());
14294}
14295
14296/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014297 * FUNCTION : getBufErrStatus
14298 *
14299 * DESCRIPTION: get buffer error status
14300 *
14301 * PARAMETERS : @buffer: buffer handle
14302 *
14303 * RETURN : Error status
14304 *
14305 *==========================================================================*/
14306int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14307{
14308 for (auto& req : mPendingBuffersInRequest) {
14309 for (auto& k : req.mPendingBufferList) {
14310 if (k.buffer == buffer)
14311 return k.bufStatus;
14312 }
14313 }
14314 return CAMERA3_BUFFER_STATUS_OK;
14315}
14316
14317/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014318 * FUNCTION : setPAAFSupport
14319 *
14320 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14321 * feature mask according to stream type and filter
14322 * arrangement
14323 *
14324 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14325 * @stream_type: stream type
14326 * @filter_arrangement: filter arrangement
14327 *
14328 * RETURN : None
14329 *==========================================================================*/
14330void QCamera3HardwareInterface::setPAAFSupport(
14331 cam_feature_mask_t& feature_mask,
14332 cam_stream_type_t stream_type,
14333 cam_color_filter_arrangement_t filter_arrangement)
14334{
Thierry Strudel3d639192016-09-09 11:52:26 -070014335 switch (filter_arrangement) {
14336 case CAM_FILTER_ARRANGEMENT_RGGB:
14337 case CAM_FILTER_ARRANGEMENT_GRBG:
14338 case CAM_FILTER_ARRANGEMENT_GBRG:
14339 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014340 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14341 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014342 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014343 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14344 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014345 }
14346 break;
14347 case CAM_FILTER_ARRANGEMENT_Y:
14348 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14349 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14350 }
14351 break;
14352 default:
14353 break;
14354 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014355 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14356 feature_mask, stream_type, filter_arrangement);
14357
14358
Thierry Strudel3d639192016-09-09 11:52:26 -070014359}
14360
14361/*===========================================================================
14362* FUNCTION : getSensorMountAngle
14363*
14364* DESCRIPTION: Retrieve sensor mount angle
14365*
14366* PARAMETERS : None
14367*
14368* RETURN : sensor mount angle in uint32_t
14369*==========================================================================*/
14370uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14371{
14372 return gCamCapability[mCameraId]->sensor_mount_angle;
14373}
14374
14375/*===========================================================================
14376* FUNCTION : getRelatedCalibrationData
14377*
14378* DESCRIPTION: Retrieve related system calibration data
14379*
14380* PARAMETERS : None
14381*
14382* RETURN : Pointer of related system calibration data
14383*==========================================================================*/
14384const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14385{
14386 return (const cam_related_system_calibration_data_t *)
14387 &(gCamCapability[mCameraId]->related_cam_calibration);
14388}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014389
14390/*===========================================================================
14391 * FUNCTION : is60HzZone
14392 *
14393 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14394 *
14395 * PARAMETERS : None
14396 *
14397 * RETURN : True if in 60Hz zone, False otherwise
14398 *==========================================================================*/
14399bool QCamera3HardwareInterface::is60HzZone()
14400{
14401 time_t t = time(NULL);
14402 struct tm lt;
14403
14404 struct tm* r = localtime_r(&t, &lt);
14405
14406 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14407 return true;
14408 else
14409 return false;
14410}
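// The check above is a coarse, timezone-only heuristic: UTC offsets at or west
// of -2h and at or east of +8h are treated as 60Hz mains regions, everything
// in between as 50Hz, and a failed localtime_r() also defaults to 60Hz.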

/*===========================================================================
 * FUNCTION   : adjustBlackLevelForCFA
 *
 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
 *              of bayer CFA (Color Filter Array).
 *
 * PARAMETERS : @input: black level pattern in the order of RGGB
 *              @output: black level pattern in the order of CFA
 *              @color_arrangement: CFA color arrangement
 *
 * RETURN     : None
 *==========================================================================*/
template<typename T>
void QCamera3HardwareInterface::adjustBlackLevelForCFA(
        T input[BLACK_LEVEL_PATTERN_CNT],
        T output[BLACK_LEVEL_PATTERN_CNT],
        cam_color_filter_arrangement_t color_arrangement)
{
    switch (color_arrangement) {
    case CAM_FILTER_ARRANGEMENT_GRBG:
        output[0] = input[1];
        output[1] = input[0];
        output[2] = input[3];
        output[3] = input[2];
        break;
    case CAM_FILTER_ARRANGEMENT_GBRG:
        output[0] = input[2];
        output[1] = input[3];
        output[2] = input[0];
        output[3] = input[1];
        break;
    case CAM_FILTER_ARRANGEMENT_BGGR:
        output[0] = input[3];
        output[1] = input[2];
        output[2] = input[1];
        output[3] = input[0];
        break;
    case CAM_FILTER_ARRANGEMENT_RGGB:
        output[0] = input[0];
        output[1] = input[1];
        output[2] = input[2];
        output[3] = input[3];
        break;
    default:
        LOGE("Invalid color arrangement to derive dynamic blacklevel");
        break;
    }
}
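
/*
 * Worked example (not part of the HAL): with a black level pattern given in
 * RGGB order, say {R=64, Gr=65, Gb=66, B=67}, a GRBG sensor needs the entries
 * swapped so they line up with its own CFA cell order:
 *
 *     float in[BLACK_LEVEL_PATTERN_CNT]  = {64, 65, 66, 67};
 *     float out[BLACK_LEVEL_PATTERN_CNT] = {};
 *     adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_GRBG);
 *     // out == {65, 64, 67, 66}
 */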

void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
        CameraMetadata &resultMetadata,
        std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    }
}

bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
        const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;

    // Check that the noise reduction mode is high quality.
    if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
            metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
                metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
        return false;
    }

    // Check that the edge mode is high quality.
    if (!metadata.exists(ANDROID_EDGE_MODE) ||
            metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    if (request.num_output_buffers != 1 ||
            request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
            ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
        }
        return false;
    }

    // Get a YUV buffer from pic channel.
    QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
    auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
    status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
    if (res != OK) {
        ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return false;
    }

    pbcamera::StreamBuffer buffer;
    buffer.streamId = kPbYuvOutputStreamId;
    buffer.dmaBufFd = yuvBuffer->fd;
    buffer.data = yuvBuffer->buffer;
    buffer.dataSize = yuvBuffer->frame_len;

    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    pbRequest.outputBuffers.push_back(buffer);

    // Submit an HDR+ capture request to the HDR+ service.
    res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        return false;
    }

    hdrPlusRequest->yuvBuffer = yuvBuffer;
    hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);

    return true;
}
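
/*
 * Illustrative sketch (not part of the HAL): how the capture path might route a
 * qualifying request to HDR+. A request qualifies only when noise reduction and
 * edge modes are HIGH_QUALITY and it has a single BLOB (JPEG) output buffer;
 * otherwise it falls through to the regular capture path. The caller-side
 * bookkeeping below is an assumption for illustration.
 *
 *     HdrPlusPendingRequest hdrPlusRequest;
 *     if (trySubmittingHdrPlusRequestLocked(&hdrPlusRequest, *request, metadata)) {
 *         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
 *         mHdrPlusPendingRequests.emplace(request->frame_number, hdrPlusRequest);
 *     } // else: submit the request through the normal pipeline
 */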

status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
{
    if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
        return OK;
    }

    status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
    if (res != OK) {
        ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    gHdrPlusClientOpening = true;

    return OK;
}

status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
{
    status_t res;

    if (mHdrPlusModeEnabled) {
        return OK;
    }

    // Check if gHdrPlusClient is opened or being opened.
    if (gHdrPlusClient == nullptr) {
        if (gHdrPlusClientOpening) {
            // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
            return OK;
        }

        res = openHdrPlusClientAsyncLocked();
        if (res != OK) {
            ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // When opening the HDR+ client completes, HDR+ mode will be enabled.
        return OK;
    }

    // Configure streams for HDR+.
    res = configureHdrPlusStreamsLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
    res = gHdrPlusClient->setZslHdrPlusMode(true);
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    mHdrPlusModeEnabled = true;
    ALOGD("%s: HDR+ mode enabled", __FUNCTION__);

    return OK;
}
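
/*
 * Illustrative sketch (not part of the HAL): the asynchronous HDR+ client open
 * sequence implied by the two functions above and the onOpened() callback below.
 *
 *     enableHdrPlusModeLocked()
 *         -> gHdrPlusClient == nullptr, so openHdrPlusClientAsyncLocked()
 *            sets gHdrPlusClientOpening = true and returns OK
 *     ... Easel manager finishes opening the client ...
 *     onOpened(client)
 *         -> gHdrPlusClient = std::move(client), gHdrPlusClientOpening = false
 *         -> enableHdrPlusModeLocked() again, which now configures the HDR+
 *            streams and calls setZslHdrPlusMode(true)
 */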

void QCamera3HardwareInterface::disableHdrPlusModeLocked()
{
    // Disable HDR+ mode.
    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
        status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
        if (res != OK) {
            ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        // Close HDR+ client so Easel can enter low power mode.
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
    }

    mHdrPlusModeEnabled = false;
    gHdrPlusClientOpening = false;
    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
}

status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
{
    pbcamera::InputConfiguration inputConfig;
    std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
    status_t res = OK;

    // Configure HDR+ client streams.
    // Get the input config.
    if (mHdrPlusRawSrcChannel) {
        // HDR+ input buffers will be provided by HAL.
        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        inputConfig.isSensorInput = false;
    } else {
        // Sensor MIPI will send data to Easel.
        inputConfig.isSensorInput = true;
        inputConfig.sensorMode.cameraId = mCameraId;
        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
        if (mSensorModeInfo.num_raw_bits != 10) {
            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
                    mSensorModeInfo.num_raw_bits);
            return BAD_VALUE;
        }

        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
    }

    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);

            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
{
    ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
    }
}

void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Find the timestamp.
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(updatedResultMetadata,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res != OK) {
            ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                    __FUNCTION__, result->requestId, strerror(-res), res);
        } else {
            mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);

        // Return the buffer to pic channel.
        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
        picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

        mHdrPlusPendingRequests.erase(pendingRequest);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Mark this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that is not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}
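
/*
 * Illustrative sketch (not part of the HAL): shutters are delivered to the
 * framework strictly in frame-number order. The timestamps t10..t12 below are
 * placeholders.
 *
 *     dispatcher.expectShutter(10, false);
 *     dispatcher.expectShutter(11, false);
 *     dispatcher.expectShutter(12, false);
 *     dispatcher.markShutterReady(11, t11);  // held: frame 10 is not ready yet
 *     dispatcher.markShutterReady(10, t10);  // shutters for 10 and 11 go out
 *     dispatcher.markShutterReady(12, t12);  // shutter for 12 goes out
 */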

void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that is not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result containing this buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}
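
/*
 * Illustrative sketch (not part of the HAL): output buffers are returned in
 * frame-number order per stream, independently of other streams. buf11/buf12
 * below are placeholder camera3_stream_buffer_t values for the same stream.
 *
 *     dispatcher.expectBuffer(11, stream);
 *     dispatcher.expectBuffer(12, stream);
 *     dispatcher.markBufferReady(12, buf12);  // held: frame 11 is still pending
 *     dispatcher.markBufferReady(11, buf11);  // results for 11 and 12 go out
 */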

void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera