Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
Jiyong Parkd4caeb72017-06-12 17:16:36 +090067using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070068using namespace android;
69
70namespace qcamera {
71
72#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
73
74#define EMPTY_PIPELINE_DELAY 2
75#define PARTIAL_RESULT_COUNT 2
76#define FRAME_SKIP_DELAY 0
77
78#define MAX_VALUE_8BIT ((1<<8)-1)
79#define MAX_VALUE_10BIT ((1<<10)-1)
80#define MAX_VALUE_12BIT ((1<<12)-1)
81
82#define VIDEO_4K_WIDTH 3840
83#define VIDEO_4K_HEIGHT 2160
84
Jason Leeb9e76432017-03-10 17:14:19 -080085#define MAX_EIS_WIDTH 3840
86#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070087
88#define MAX_RAW_STREAMS 1
89#define MAX_STALLING_STREAMS 1
90#define MAX_PROCESSED_STREAMS 3
 91/* Batch mode is enabled only if the requested FPS is greater than or equal to this value */
92#define MIN_FPS_FOR_BATCH_MODE (120)
93#define PREVIEW_FPS_FOR_HFR (30)
94#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080095#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070096#define MAX_HFR_BATCH_SIZE (8)
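// Requests at MIN_FPS_FOR_BATCH_MODE or above are serviced in batch mode so that preview
// still runs at PREVIEW_FPS_FOR_HFR. Assuming the batch size is derived as the requested
// fps divided by the preview fps (as this HAL does when configuring HFR), a 120 fps
// request batches 120 / 30 = 4 frames, while a 480 fps request is capped at
// MAX_HFR_BATCH_SIZE (8).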
97#define REGIONS_TUPLE_COUNT 5
98#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070099// Threshold (in seconds) for detecting missing request buffers
Emilian Peev30522a12017-08-03 14:36:33 +0100100#define MISSING_REQUEST_BUF_TIMEOUT 5
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800101#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700102#define FLUSH_TIMEOUT 3
103#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
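// METADATA_MAP_SIZE gives the entry count of a statically sized mapping table, e.g.
// METADATA_MAP_SIZE(EFFECT_MODES_MAP) when translating between framework and HAL enums.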
104
105#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
106 CAM_QCOM_FEATURE_CROP |\
107 CAM_QCOM_FEATURE_ROTATION |\
108 CAM_QCOM_FEATURE_SHARPNESS |\
109 CAM_QCOM_FEATURE_SCALE |\
110 CAM_QCOM_FEATURE_CAC |\
111 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700112/* Per-configuration size for static metadata length */
113#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700114
115#define TIMEOUT_NEVER -1
116
Jason Lee8ce36fa2017-04-19 19:40:37 -0700117/* Face rect indices */
118#define FACE_LEFT 0
119#define FACE_TOP 1
120#define FACE_RIGHT 2
121#define FACE_BOTTOM 3
122#define FACE_WEIGHT 4
123
Thierry Strudel04e026f2016-10-10 11:27:36 -0700124/* Face landmarks indices */
125#define LEFT_EYE_X 0
126#define LEFT_EYE_Y 1
127#define RIGHT_EYE_X 2
128#define RIGHT_EYE_Y 3
129#define MOUTH_X 4
130#define MOUTH_Y 5
131#define TOTAL_LANDMARK_INDICES 6
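// The face-rect and landmark indices above describe how face data is packed into the flat
// int32 arrays reported in the result metadata: each rectangle occupies consecutive values
// (FACE_LEFT, FACE_TOP, FACE_RIGHT, FACE_BOTTOM, plus FACE_WEIGHT where a weight is
// carried), and each landmark entry packs TOTAL_LANDMARK_INDICES values in the order
// LEFT_EYE_X/Y, RIGHT_EYE_X/Y, MOUTH_X/Y.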
132
Zhijun He2a5df222017-04-04 18:20:38 -0700133// Max preferred zoom
Zhijun He76870072017-05-08 17:13:17 -0700134#define MAX_PREFERRED_ZOOM_RATIO 7.0
Zhijun He2a5df222017-04-04 18:20:38 -0700135
Chien-Yu Chen3b630e52017-06-02 15:39:47 -0700136// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
137#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0
138
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700139// Whether to check for the GPU stride padding, or use the default
140//#define CHECK_GPU_PIXEL_ALIGNMENT
141
Thierry Strudel3d639192016-09-09 11:52:26 -0700142cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
143const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
144extern pthread_mutex_t gCamLock;
145volatile uint32_t gCamHal3LogLevel = 1;
146extern uint8_t gNumCameraSessions;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700147
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800148// Note that this doesn't support concurrent front and back camera b/35960155.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700149// The following Easel related variables must be protected by gHdrPlusClientLock.
Chien-Yu Chen44abb642017-06-02 18:00:38 -0700150std::unique_ptr<EaselManagerClient> gEaselManagerClient;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700151bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
152std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
153bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700154bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700155bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700156
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800157// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
158bool gEaselBypassOnly;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700159
160Mutex gHdrPlusClientLock; // Protect above Easel related variables.
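// A minimal sketch of the expected access pattern for the Easel globals above (every read
// or write is done under gHdrPlusClientLock):
//
//     Mutex::Autolock l(gHdrPlusClientLock);
//     if (gHdrPlusClient != nullptr) {
//         // safe to use the client here
//     }
//
// openCamera() and closeCamera() below follow this pattern when resuming or suspending Easel.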
161
Thierry Strudel3d639192016-09-09 11:52:26 -0700162
163const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
164 {"On", CAM_CDS_MODE_ON},
165 {"Off", CAM_CDS_MODE_OFF},
166 {"Auto",CAM_CDS_MODE_AUTO}
167};
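// The QCameraMap tables below pair a framework (or vendor-tag) enum with its mm-camera
// counterpart. They are typically walked with METADATA_MAP_SIZE() and the lookup helpers
// used elsewhere in this HAL (lookupHalName()/lookupFwkName()) to translate values in
// either direction.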
Thierry Strudel04e026f2016-10-10 11:27:36 -0700168const QCamera3HardwareInterface::QCameraMap<
169 camera_metadata_enum_android_video_hdr_mode_t,
170 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
171 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
172 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
173};
174
Thierry Strudel54dc9782017-02-15 12:12:10 -0800175const QCamera3HardwareInterface::QCameraMap<
176 camera_metadata_enum_android_binning_correction_mode_t,
177 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
178 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
179 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
180};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700181
182const QCamera3HardwareInterface::QCameraMap<
183 camera_metadata_enum_android_ir_mode_t,
184 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
185 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
186 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
187 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
188};
Thierry Strudel3d639192016-09-09 11:52:26 -0700189
190const QCamera3HardwareInterface::QCameraMap<
191 camera_metadata_enum_android_control_effect_mode_t,
192 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
193 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
194 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
195 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
196 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
197 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
198 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
199 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
200 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
201 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
202};
203
204const QCamera3HardwareInterface::QCameraMap<
205 camera_metadata_enum_android_control_awb_mode_t,
206 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
207 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
208 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
209 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
210 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
211 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
212 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
213 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
214 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
215 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
216};
217
218const QCamera3HardwareInterface::QCameraMap<
219 camera_metadata_enum_android_control_scene_mode_t,
220 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
221 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
222 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
223 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
224 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
225 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
226 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
227 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
228 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
229 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
230 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
231 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
232 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
233 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
234 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
235 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800236 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
237 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700238};
239
240const QCamera3HardwareInterface::QCameraMap<
241 camera_metadata_enum_android_control_af_mode_t,
242 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
243 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
244 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
245 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
246 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
247 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
248 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
249 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
250};
251
252const QCamera3HardwareInterface::QCameraMap<
253 camera_metadata_enum_android_color_correction_aberration_mode_t,
254 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
255 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
256 CAM_COLOR_CORRECTION_ABERRATION_OFF },
257 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
258 CAM_COLOR_CORRECTION_ABERRATION_FAST },
259 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
260 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
261};
262
263const QCamera3HardwareInterface::QCameraMap<
264 camera_metadata_enum_android_control_ae_antibanding_mode_t,
265 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
266 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
267 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
268 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
269 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
270};
271
272const QCamera3HardwareInterface::QCameraMap<
273 camera_metadata_enum_android_control_ae_mode_t,
274 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
275 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
276 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
277 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
278 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
279 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
280};
281
282const QCamera3HardwareInterface::QCameraMap<
283 camera_metadata_enum_android_flash_mode_t,
284 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
285 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
286 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
287 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
288};
289
290const QCamera3HardwareInterface::QCameraMap<
291 camera_metadata_enum_android_statistics_face_detect_mode_t,
292 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
293 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
294 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
295 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
296};
297
298const QCamera3HardwareInterface::QCameraMap<
299 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
300 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
301 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
302 CAM_FOCUS_UNCALIBRATED },
303 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
304 CAM_FOCUS_APPROXIMATE },
305 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
306 CAM_FOCUS_CALIBRATED }
307};
308
309const QCamera3HardwareInterface::QCameraMap<
310 camera_metadata_enum_android_lens_state_t,
311 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
312 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
313 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
314};
315
316const int32_t available_thumbnail_sizes[] = {0, 0,
317 176, 144,
318 240, 144,
319 256, 144,
320 240, 160,
321 256, 154,
322 240, 240,
323 320, 240};
324
325const QCamera3HardwareInterface::QCameraMap<
326 camera_metadata_enum_android_sensor_test_pattern_mode_t,
327 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
328 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
329 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
330 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
331 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
332 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
333 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
334};
335
 336/* Since there is no mapping for all the options, some Android enums are not listed.
 337 * The order in this list also matters: when mapping from HAL to Android, the table is
 338 * traversed from lower to higher index, so for HAL values that map to multiple
 339 * Android values the first entry found is selected.
 340 */
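// For example, CAM_AWB_D50 appears below for D50, DAYLIGHT and FINE_WEATHER; when mapping
// back from HAL to Android, the first match (ILLUMINANT1_D50) is the value reported.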
341const QCamera3HardwareInterface::QCameraMap<
342 camera_metadata_enum_android_sensor_reference_illuminant1_t,
343 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
344 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
345 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
346 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
347 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
348 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
349 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
350 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
351 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
352 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
353 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
354 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
355 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
356 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
357 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
358 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
359 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
360};
361
362const QCamera3HardwareInterface::QCameraMap<
363 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
364 { 60, CAM_HFR_MODE_60FPS},
365 { 90, CAM_HFR_MODE_90FPS},
366 { 120, CAM_HFR_MODE_120FPS},
367 { 150, CAM_HFR_MODE_150FPS},
368 { 180, CAM_HFR_MODE_180FPS},
369 { 210, CAM_HFR_MODE_210FPS},
370 { 240, CAM_HFR_MODE_240FPS},
371 { 480, CAM_HFR_MODE_480FPS},
372};
373
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700374const QCamera3HardwareInterface::QCameraMap<
375 qcamera3_ext_instant_aec_mode_t,
376 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
377 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
378 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
379 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
380};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800381
382const QCamera3HardwareInterface::QCameraMap<
383 qcamera3_ext_exposure_meter_mode_t,
384 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
385 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
386 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
387 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
388 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
389 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
390 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
391 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
392};
393
394const QCamera3HardwareInterface::QCameraMap<
395 qcamera3_ext_iso_mode_t,
396 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
397 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
398 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
399 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
400 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
401 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
402 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
403 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
404 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
405};
406
Thierry Strudel3d639192016-09-09 11:52:26 -0700407camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
408 .initialize = QCamera3HardwareInterface::initialize,
409 .configure_streams = QCamera3HardwareInterface::configure_streams,
410 .register_stream_buffers = NULL,
411 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
412 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
413 .get_metadata_vendor_tag_ops = NULL,
414 .dump = QCamera3HardwareInterface::dump,
415 .flush = QCamera3HardwareInterface::flush,
416 .reserved = {0},
417};
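// camera3_device_ops_t is the HAL3 entry-point table handed to the camera framework via
// mCameraDevice.ops in the constructor; register_stream_buffers and
// get_metadata_vendor_tag_ops stay NULL since they are deprecated for device versions
// >= 3.2.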
418
 419// initialise session ids to an invalid sentinel value
420uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
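// 0xDEADBEEF is the "no active session" sentinel: openCamera() fills in the real backend
// session id and closeCamera() resets the slot to this value.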
421
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700422static inline void logEaselEvent(const char *tag, const char *event) {
423 if (CC_UNLIKELY(gEaselProfilingEnabled)) {
424 struct timespec ts = {};
425 static int64_t kMsPerSec = 1000;
426 static int64_t kNsPerMs = 1000000;
427 status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
428 if (res != OK) {
429 ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
430 } else {
431 int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
432 ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
433 }
434 }
435}
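// Example output with Easel profiling enabled (gEaselProfilingEnabled), e.g. from
// logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open"):
//     [EASEL_STARTUP_LATENCY] Camera Open at 123456 ms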
436
Thierry Strudel3d639192016-09-09 11:52:26 -0700437/*===========================================================================
438 * FUNCTION : QCamera3HardwareInterface
439 *
440 * DESCRIPTION: constructor of QCamera3HardwareInterface
441 *
442 * PARAMETERS :
443 * @cameraId : camera ID
444 *
445 * RETURN : none
446 *==========================================================================*/
447QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
448 const camera_module_callbacks_t *callbacks)
449 : mCameraId(cameraId),
450 mCameraHandle(NULL),
451 mCameraInitialized(false),
452 mCallbackOps(NULL),
453 mMetadataChannel(NULL),
454 mPictureChannel(NULL),
455 mRawChannel(NULL),
456 mSupportChannel(NULL),
457 mAnalysisChannel(NULL),
458 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700459 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700460 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800461 mDepthChannel(NULL),
Emilian Peev656e4fa2017-06-02 16:47:04 +0100462 mDepthCloudMode(CAM_PD_DATA_SKIP),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800463 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700464 mChannelHandle(0),
465 mFirstConfiguration(true),
466 mFlush(false),
467 mFlushPerf(false),
468 mParamHeap(NULL),
469 mParameters(NULL),
470 mPrevParameters(NULL),
471 m_bIsVideo(false),
472 m_bIs4KVideo(false),
473 m_bEisSupportedSize(false),
474 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800475 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700476 m_MobicatMask(0),
Chien-Yu Chen3f303522017-05-19 15:21:45 -0700477 mShutterDispatcher(this),
478 mOutputBufferDispatcher(this),
Thierry Strudel3d639192016-09-09 11:52:26 -0700479 mMinProcessedFrameDuration(0),
480 mMinJpegFrameDuration(0),
481 mMinRawFrameDuration(0),
Emilian Peev30522a12017-08-03 14:36:33 +0100482 mExpectedFrameDuration(0),
483 mExpectedInflightDuration(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700484 mMetaFrameCount(0U),
485 mUpdateDebugLevel(false),
486 mCallbacks(callbacks),
487 mCaptureIntent(0),
488 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700489 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800490 /* DevCamDebug metadata internal m control*/
491 mDevCamDebugMetaEnable(0),
492 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700493 mBatchSize(0),
494 mToBeQueuedVidBufs(0),
495 mHFRVideoFps(DEFAULT_VIDEO_FPS),
496 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800497 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800498 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700499 mFirstFrameNumberInBatch(0),
500 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800501 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700502 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
503 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000504 mPDSupported(false),
505 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700506 mInstantAEC(false),
507 mResetInstantAEC(false),
508 mInstantAECSettledFrameNumber(0),
509 mAecSkipDisplayFrameBound(0),
510 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800511 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700512 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700513 mLastCustIntentFrmNum(-1),
Shuzhen Wang3c077d72017-04-20 22:48:59 -0700514 mFirstMetadataCallback(true),
Thierry Strudel3d639192016-09-09 11:52:26 -0700515 mState(CLOSED),
516 mIsDeviceLinked(false),
517 mIsMainCamera(true),
518 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700519 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800520 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800521 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700522 mZslEnabled(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800523 mIsApInputUsedForHdrPlus(false),
524 mFirstPreviewIntentSeen(false),
Shuzhen Wang181c57b2017-07-21 11:39:44 -0700525 m_bSensorHDREnabled(false),
526 mAfTrigger()
Thierry Strudel3d639192016-09-09 11:52:26 -0700527{
528 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700529 mCommon.init(gCamCapability[cameraId]);
530 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700531#ifndef USE_HAL_3_3
532 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
533#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700534 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700535#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700536 mCameraDevice.common.close = close_camera_device;
537 mCameraDevice.ops = &mCameraOps;
538 mCameraDevice.priv = this;
539 gCamCapability[cameraId]->version = CAM_HAL_V3;
 540 // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
 541 //TBD - check whether this hardcoding is still needed, i.e. whether mctl already fills this with 3
542 gCamCapability[cameraId]->min_num_pp_bufs = 3;
543
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800544 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700545
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800546 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700547 mPendingLiveRequest = 0;
548 mCurrentRequestId = -1;
549 pthread_mutex_init(&mMutex, NULL);
550
551 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
552 mDefaultMetadata[i] = NULL;
553
554 // Getting system props of different kinds
555 char prop[PROPERTY_VALUE_MAX];
556 memset(prop, 0, sizeof(prop));
557 property_get("persist.camera.raw.dump", prop, "0");
558 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800559 property_get("persist.camera.hal3.force.hdr", prop, "0");
560 mForceHdrSnapshot = atoi(prop);
561
Thierry Strudel3d639192016-09-09 11:52:26 -0700562 if (mEnableRawDump)
563 LOGD("Raw dump from Camera HAL enabled");
564
565 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
566 memset(mLdafCalib, 0, sizeof(mLdafCalib));
567
568 memset(prop, 0, sizeof(prop));
569 property_get("persist.camera.tnr.preview", prop, "0");
570 m_bTnrPreview = (uint8_t)atoi(prop);
571
572 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800573 property_get("persist.camera.swtnr.preview", prop, "1");
574 m_bSwTnrPreview = (uint8_t)atoi(prop);
575
576 memset(prop, 0, sizeof(prop));
Binhao Lincdb362a2017-04-20 13:31:54 -0700577 property_get("persist.camera.tnr.video", prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -0700578 m_bTnrVideo = (uint8_t)atoi(prop);
579
580 memset(prop, 0, sizeof(prop));
581 property_get("persist.camera.avtimer.debug", prop, "0");
582 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800583 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700584
Thierry Strudel54dc9782017-02-15 12:12:10 -0800585 memset(prop, 0, sizeof(prop));
586 property_get("persist.camera.cacmode.disable", prop, "0");
587 m_cacModeDisabled = (uint8_t)atoi(prop);
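    // The persist.camera.* knobs above are read once per HAL instance at construction time;
    // for example (assuming a debug build with adb access), raw dumping is enabled for
    // subsequent camera opens with: adb shell setprop persist.camera.raw.dump 1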
588
Thierry Strudel3d639192016-09-09 11:52:26 -0700589 //Load and read GPU library.
590 lib_surface_utils = NULL;
591 LINK_get_surface_pixel_alignment = NULL;
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700592 mSurfaceStridePadding = CAM_PAD_TO_64;
593#ifdef CHECK_GPU_PIXEL_ALIGNMENT
Thierry Strudel3d639192016-09-09 11:52:26 -0700594 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
595 if (lib_surface_utils) {
596 *(void **)&LINK_get_surface_pixel_alignment =
597 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
598 if (LINK_get_surface_pixel_alignment) {
599 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
600 }
601 dlclose(lib_surface_utils);
602 }
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700603#endif
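    // Without CHECK_GPU_PIXEL_ALIGNMENT the stride padding stays at the CAM_PAD_TO_64
    // default set above; with it defined, the padding is queried from the GPU through
    // libadreno_utils (get_gpu_pixel_alignment) when that library can be loaded.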
Emilian Peev0f3c3162017-03-15 12:57:46 +0000604 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
 605 mPDSupported = (0 <= mPDIndex);
606
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700607 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700608}
609
610/*===========================================================================
611 * FUNCTION : ~QCamera3HardwareInterface
612 *
613 * DESCRIPTION: destructor of QCamera3HardwareInterface
614 *
615 * PARAMETERS : none
616 *
617 * RETURN : none
618 *==========================================================================*/
619QCamera3HardwareInterface::~QCamera3HardwareInterface()
620{
621 LOGD("E");
622
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800623 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700624
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800625 // Disable power hint and enable the perf lock for close camera
626 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
627 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
628
629 // unlink of dualcam during close camera
630 if (mIsDeviceLinked) {
631 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
632 &m_pDualCamCmdPtr->bundle_info;
633 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
634 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
635 pthread_mutex_lock(&gCamLock);
636
637 if (mIsMainCamera == 1) {
638 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
639 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
640 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
641 // related session id should be session id of linked session
642 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
643 } else {
644 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
645 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
646 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
647 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
648 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800649 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800650 pthread_mutex_unlock(&gCamLock);
651
652 rc = mCameraHandle->ops->set_dual_cam_cmd(
653 mCameraHandle->camera_handle);
654 if (rc < 0) {
655 LOGE("Dualcam: Unlink failed, but still proceed to close");
656 }
657 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700658
659 /* We need to stop all streams before deleting any stream */
660 if (mRawDumpChannel) {
661 mRawDumpChannel->stop();
662 }
663
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700664 if (mHdrPlusRawSrcChannel) {
665 mHdrPlusRawSrcChannel->stop();
666 }
667
Thierry Strudel3d639192016-09-09 11:52:26 -0700668 // NOTE: 'camera3_stream_t *' objects are already freed at
669 // this stage by the framework
670 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
671 it != mStreamInfo.end(); it++) {
672 QCamera3ProcessingChannel *channel = (*it)->channel;
673 if (channel) {
674 channel->stop();
675 }
676 }
677 if (mSupportChannel)
678 mSupportChannel->stop();
679
680 if (mAnalysisChannel) {
681 mAnalysisChannel->stop();
682 }
683 if (mMetadataChannel) {
684 mMetadataChannel->stop();
685 }
686 if (mChannelHandle) {
687 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -0700688 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -0700689 LOGD("stopping channel %d", mChannelHandle);
690 }
691
692 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
693 it != mStreamInfo.end(); it++) {
694 QCamera3ProcessingChannel *channel = (*it)->channel;
695 if (channel)
696 delete channel;
697 free (*it);
698 }
699 if (mSupportChannel) {
700 delete mSupportChannel;
701 mSupportChannel = NULL;
702 }
703
704 if (mAnalysisChannel) {
705 delete mAnalysisChannel;
706 mAnalysisChannel = NULL;
707 }
708 if (mRawDumpChannel) {
709 delete mRawDumpChannel;
710 mRawDumpChannel = NULL;
711 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700712 if (mHdrPlusRawSrcChannel) {
713 delete mHdrPlusRawSrcChannel;
714 mHdrPlusRawSrcChannel = NULL;
715 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700716 if (mDummyBatchChannel) {
717 delete mDummyBatchChannel;
718 mDummyBatchChannel = NULL;
719 }
720
721 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800722 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700723
724 if (mMetadataChannel) {
725 delete mMetadataChannel;
726 mMetadataChannel = NULL;
727 }
728
729 /* Clean up all channels */
730 if (mCameraInitialized) {
731 if(!mFirstConfiguration){
732 //send the last unconfigure
733 cam_stream_size_info_t stream_config_info;
734 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
735 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
736 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800737 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -0700738 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700739 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700740 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
741 stream_config_info);
742 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
743 if (rc < 0) {
744 LOGE("set_parms failed for unconfigure");
745 }
746 }
747 deinitParameters();
748 }
749
750 if (mChannelHandle) {
751 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
752 mChannelHandle);
753 LOGH("deleting channel %d", mChannelHandle);
754 mChannelHandle = 0;
755 }
756
757 if (mState != CLOSED)
758 closeCamera();
759
760 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
761 req.mPendingBufferList.clear();
762 }
763 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700764 for (pendingRequestIterator i = mPendingRequestsList.begin();
765 i != mPendingRequestsList.end();) {
766 i = erasePendingRequest(i);
767 }
768 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
769 if (mDefaultMetadata[i])
770 free_camera_metadata(mDefaultMetadata[i]);
771
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800772 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700773
774 pthread_cond_destroy(&mRequestCond);
775
776 pthread_cond_destroy(&mBuffersCond);
777
778 pthread_mutex_destroy(&mMutex);
779 LOGD("X");
780}
781
782/*===========================================================================
783 * FUNCTION : erasePendingRequest
784 *
785 * DESCRIPTION: function to erase a desired pending request after freeing any
786 * allocated memory
787 *
788 * PARAMETERS :
789 * @i : iterator pointing to pending request to be erased
790 *
791 * RETURN : iterator pointing to the next request
792 *==========================================================================*/
793QCamera3HardwareInterface::pendingRequestIterator
794 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
795{
796 if (i->input_buffer != NULL) {
797 free(i->input_buffer);
798 i->input_buffer = NULL;
799 }
800 if (i->settings != NULL)
801 free_camera_metadata((camera_metadata_t*)i->settings);
Emilian Peev30522a12017-08-03 14:36:33 +0100802
803 mExpectedInflightDuration -= i->expectedFrameDuration;
804 if (mExpectedInflightDuration < 0) {
805 LOGE("Negative expected in-flight duration!");
806 mExpectedInflightDuration = 0;
807 }
808
Thierry Strudel3d639192016-09-09 11:52:26 -0700809 return mPendingRequestsList.erase(i);
810}
811
812/*===========================================================================
813 * FUNCTION : camEvtHandle
814 *
815 * DESCRIPTION: Function registered to mm-camera-interface to handle events
816 *
817 * PARAMETERS :
818 * @camera_handle : interface layer camera handle
819 * @evt : ptr to event
820 * @user_data : user data ptr
821 *
822 * RETURN : none
823 *==========================================================================*/
824void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
825 mm_camera_event_t *evt,
826 void *user_data)
827{
828 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
829 if (obj && evt) {
830 switch(evt->server_event_type) {
831 case CAM_EVENT_TYPE_DAEMON_DIED:
832 pthread_mutex_lock(&obj->mMutex);
833 obj->mState = ERROR;
834 pthread_mutex_unlock(&obj->mMutex);
835 LOGE("Fatal, camera daemon died");
836 break;
837
838 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
839 LOGD("HAL got request pull from Daemon");
840 pthread_mutex_lock(&obj->mMutex);
841 obj->mWokenUpByDaemon = true;
842 obj->unblockRequestIfNecessary();
843 pthread_mutex_unlock(&obj->mMutex);
844 break;
845
846 default:
847 LOGW("Warning: Unhandled event %d",
848 evt->server_event_type);
849 break;
850 }
851 } else {
852 LOGE("NULL user_data/evt");
853 }
854}
855
856/*===========================================================================
857 * FUNCTION : openCamera
858 *
859 * DESCRIPTION: open camera
860 *
861 * PARAMETERS :
862 * @hw_device : double ptr for camera device struct
863 *
864 * RETURN : int32_t type of status
865 * NO_ERROR -- success
 866 * non-zero failure code
867 *==========================================================================*/
868int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
869{
870 int rc = 0;
871 if (mState != CLOSED) {
872 *hw_device = NULL;
873 return PERMISSION_DENIED;
874 }
875
Chien-Yu Chene96475e2017-04-11 11:53:26 -0700876 logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800877 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700878 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
879 mCameraId);
880
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700881 if (mCameraHandle) {
882 LOGE("Failure: Camera already opened");
883 return ALREADY_EXISTS;
884 }
885
886 {
887 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -0700888 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700889 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
Chien-Yu Chen933db802017-07-14 14:31:53 -0700890 rc = gEaselManagerClient->resume(this);
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700891 if (rc != 0) {
892 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
893 return rc;
894 }
895 }
896 }
897
Thierry Strudel3d639192016-09-09 11:52:26 -0700898 rc = openCamera();
899 if (rc == 0) {
900 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800901 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700902 *hw_device = NULL;
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700903
904 // Suspend Easel because opening camera failed.
905 {
906 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -0700907 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
908 status_t suspendErr = gEaselManagerClient->suspend();
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700909 if (suspendErr != 0) {
910 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
911 strerror(-suspendErr), suspendErr);
912 }
913 }
914 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800915 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700916
Thierry Strudel3d639192016-09-09 11:52:26 -0700917 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
918 mCameraId, rc);
919
920 if (rc == NO_ERROR) {
921 mState = OPENED;
922 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800923
Thierry Strudel3d639192016-09-09 11:52:26 -0700924 return rc;
925}
926
927/*===========================================================================
928 * FUNCTION : openCamera
929 *
930 * DESCRIPTION: open camera
931 *
932 * PARAMETERS : none
933 *
934 * RETURN : int32_t type of status
935 * NO_ERROR -- success
 936 * non-zero failure code
937 *==========================================================================*/
938int QCamera3HardwareInterface::openCamera()
939{
940 int rc = 0;
941 char value[PROPERTY_VALUE_MAX];
942
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800943 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800944
Thierry Strudel3d639192016-09-09 11:52:26 -0700945 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
946 if (rc < 0) {
947 LOGE("Failed to reserve flash for camera id: %d",
948 mCameraId);
949 return UNKNOWN_ERROR;
950 }
951
952 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
953 if (rc) {
954 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
955 return rc;
956 }
957
958 if (!mCameraHandle) {
959 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
960 return -ENODEV;
961 }
962
963 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
964 camEvtHandle, (void *)this);
965
966 if (rc < 0) {
967 LOGE("Error, failed to register event callback");
968 /* Not closing camera here since it is already handled in destructor */
969 return FAILED_TRANSACTION;
970 }
971
972 mExifParams.debug_params =
973 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
974 if (mExifParams.debug_params) {
975 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
976 } else {
977 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
978 return NO_MEMORY;
979 }
980 mFirstConfiguration = true;
981
 982 //Notify display HAL that a camera session is active.
 983 //Avoid doing this during bootup: camera service may open/close cameras while it
 984 //initializes, and display service internally waits for camera service to initialize
 985 //before servicing this display API, which can deadlock. Since boot-time camera
 986 //open/close calls are made only to fetch capabilities, the display bandwidth
 987 //optimization is not needed there.
 988 //Use the "service.bootanim.exit" property to determine boot status.
989 property_get("service.bootanim.exit", value, "0");
990 if (atoi(value) == 1) {
991 pthread_mutex_lock(&gCamLock);
992 if (gNumCameraSessions++ == 0) {
993 setCameraLaunchStatus(true);
994 }
995 pthread_mutex_unlock(&gCamLock);
996 }
997
998 //fill the session id needed while linking dual cam
999 pthread_mutex_lock(&gCamLock);
1000 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
1001 &sessionId[mCameraId]);
1002 pthread_mutex_unlock(&gCamLock);
1003
1004 if (rc < 0) {
 1005 LOGE("Error, failed to get session id");
1006 return UNKNOWN_ERROR;
1007 } else {
 1008 //Allocate the related-cam sync buffer.
 1009 //It holds the payload that goes along with the bundling cmd for related-camera
 1010 //(dual camera) use cases.
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001011 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1012 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07001013 if(rc != OK) {
1014 rc = NO_MEMORY;
1015 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1016 return NO_MEMORY;
1017 }
1018
1019 //Map memory for related cam sync buffer
1020 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001021 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1022 m_pDualCamCmdHeap->getFd(0),
1023 sizeof(cam_dual_camera_cmd_info_t),
1024 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -07001025 if(rc < 0) {
1026 LOGE("Dualcam: failed to map Related cam sync buffer");
1027 rc = FAILED_TRANSACTION;
1028 return NO_MEMORY;
1029 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001030 m_pDualCamCmdPtr =
1031 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -07001032 }
1033
1034 LOGH("mCameraId=%d",mCameraId);
1035
1036 return NO_ERROR;
1037}
1038
1039/*===========================================================================
1040 * FUNCTION : closeCamera
1041 *
1042 * DESCRIPTION: close camera
1043 *
1044 * PARAMETERS : none
1045 *
1046 * RETURN : int32_t type of status
1047 * NO_ERROR -- success
 1048 * non-zero failure code
1049 *==========================================================================*/
1050int QCamera3HardwareInterface::closeCamera()
1051{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001052 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001053 int rc = NO_ERROR;
1054 char value[PROPERTY_VALUE_MAX];
1055
1056 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1057 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001058
1059 // unmap memory for related cam sync buffer
1060 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001061 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001062 if (NULL != m_pDualCamCmdHeap) {
1063 m_pDualCamCmdHeap->deallocate();
1064 delete m_pDualCamCmdHeap;
1065 m_pDualCamCmdHeap = NULL;
1066 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001067 }
1068
Thierry Strudel3d639192016-09-09 11:52:26 -07001069 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1070 mCameraHandle = NULL;
1071
1072 //reset session id to some invalid id
1073 pthread_mutex_lock(&gCamLock);
1074 sessionId[mCameraId] = 0xDEADBEEF;
1075 pthread_mutex_unlock(&gCamLock);
1076
1077 //Notify display HAL that there is no active camera session
1078 //but avoid calling the same during bootup. Refer to openCamera
1079 //for more details.
1080 property_get("service.bootanim.exit", value, "0");
1081 if (atoi(value) == 1) {
1082 pthread_mutex_lock(&gCamLock);
1083 if (--gNumCameraSessions == 0) {
1084 setCameraLaunchStatus(false);
1085 }
1086 pthread_mutex_unlock(&gCamLock);
1087 }
1088
Thierry Strudel3d639192016-09-09 11:52:26 -07001089 if (mExifParams.debug_params) {
1090 free(mExifParams.debug_params);
1091 mExifParams.debug_params = NULL;
1092 }
1093 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1094 LOGW("Failed to release flash for camera id: %d",
1095 mCameraId);
1096 }
1097 mState = CLOSED;
1098 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1099 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001100
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001101 {
1102 Mutex::Autolock l(gHdrPlusClientLock);
1103 if (gHdrPlusClient != nullptr) {
1104 // Disable HDR+ mode.
1105 disableHdrPlusModeLocked();
1106 // Disconnect Easel if it's connected.
Chien-Yu Chen44abb642017-06-02 18:00:38 -07001107 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001108 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001109 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001110
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001111 if (EaselManagerClientOpened) {
Chien-Yu Chen44abb642017-06-02 18:00:38 -07001112 rc = gEaselManagerClient->stopMipi(mCameraId);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001113 if (rc != 0) {
1114 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1115 }
1116
Chien-Yu Chen44abb642017-06-02 18:00:38 -07001117 rc = gEaselManagerClient->suspend();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001118 if (rc != 0) {
1119 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1120 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001121 }
1122 }
1123
Thierry Strudel3d639192016-09-09 11:52:26 -07001124 return rc;
1125}
1126
1127/*===========================================================================
1128 * FUNCTION : initialize
1129 *
1130 * DESCRIPTION: Initialize frameworks callback functions
1131 *
1132 * PARAMETERS :
1133 * @callback_ops : callback function to frameworks
1134 *
1135 * RETURN :
1136 *
1137 *==========================================================================*/
1138int QCamera3HardwareInterface::initialize(
1139 const struct camera3_callback_ops *callback_ops)
1140{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001141 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001142 int rc;
1143
1144 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1145 pthread_mutex_lock(&mMutex);
1146
1147 // Validate current state
1148 switch (mState) {
1149 case OPENED:
1150 /* valid state */
1151 break;
1152 default:
1153 LOGE("Invalid state %d", mState);
1154 rc = -ENODEV;
1155 goto err1;
1156 }
1157
1158 rc = initParameters();
1159 if (rc < 0) {
 1160 LOGE("initParameters failed %d", rc);
1161 goto err1;
1162 }
1163 mCallbackOps = callback_ops;
1164
1165 mChannelHandle = mCameraHandle->ops->add_channel(
1166 mCameraHandle->camera_handle, NULL, NULL, this);
1167 if (mChannelHandle == 0) {
1168 LOGE("add_channel failed");
1169 rc = -ENOMEM;
1170 pthread_mutex_unlock(&mMutex);
1171 return rc;
1172 }
1173
1174 pthread_mutex_unlock(&mMutex);
1175 mCameraInitialized = true;
1176 mState = INITIALIZED;
1177 LOGI("X");
1178 return 0;
1179
1180err1:
1181 pthread_mutex_unlock(&mMutex);
1182 return rc;
1183}
1184
1185/*===========================================================================
1186 * FUNCTION : validateStreamDimensions
1187 *
1188 * DESCRIPTION: Check if the configuration requested are those advertised
1189 *
1190 * PARAMETERS :
1191 * @stream_list : streams to be configured
1192 *
1193 * RETURN :
1194 *
1195 *==========================================================================*/
1196int QCamera3HardwareInterface::validateStreamDimensions(
1197 camera3_stream_configuration_t *streamList)
1198{
1199 int rc = NO_ERROR;
1200 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001201 uint32_t depthWidth = 0;
1202 uint32_t depthHeight = 0;
1203 if (mPDSupported) {
1204 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1205 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1206 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001207
1208 camera3_stream_t *inputStream = NULL;
1209 /*
 1210 * Loop through all streams to find the input stream, if it exists
1211 */
1212 for (size_t i = 0; i< streamList->num_streams; i++) {
1213 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1214 if (inputStream != NULL) {
1215 LOGE("Error, Multiple input streams requested");
1216 return -EINVAL;
1217 }
1218 inputStream = streamList->streams[i];
1219 }
1220 }
1221 /*
1222 * Loop through all streams requested in configuration
1223 * Check if unsupported sizes have been requested on any of them
1224 */
1225 for (size_t j = 0; j < streamList->num_streams; j++) {
1226 bool sizeFound = false;
1227 camera3_stream_t *newStream = streamList->streams[j];
1228
1229 uint32_t rotatedHeight = newStream->height;
1230 uint32_t rotatedWidth = newStream->width;
1231 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1232 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1233 rotatedHeight = newStream->width;
1234 rotatedWidth = newStream->height;
1235 }
1236
1237 /*
 1238 * Sizes are different for each type of stream format, so check against the
 1239 * appropriate table.
1240 */
1241 switch (newStream->format) {
1242 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1243 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1244 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001245 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1246 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1247 mPDSupported) {
1248 if ((depthWidth == newStream->width) &&
1249 (depthHeight == newStream->height)) {
1250 sizeFound = true;
1251 }
1252 break;
1253 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001254 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1255 for (size_t i = 0; i < count; i++) {
1256 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1257 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1258 sizeFound = true;
1259 break;
1260 }
1261 }
1262 break;
1263 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001264 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1265 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001266 // As per spec, the depth point cloud blob width should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001267 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001268 if ((depthSamplesCount == newStream->width) &&
1269 (1 == newStream->height)) {
1270 sizeFound = true;
1271 }
1272 break;
1273 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001274 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1275 /* Verify set size against generated sizes table */
1276 for (size_t i = 0; i < count; i++) {
1277 if (((int32_t)rotatedWidth ==
1278 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1279 ((int32_t)rotatedHeight ==
1280 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1281 sizeFound = true;
1282 break;
1283 }
1284 }
1285 break;
1286 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1287 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1288 default:
1289 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1290 || newStream->stream_type == CAMERA3_STREAM_INPUT
1291 || IS_USAGE_ZSL(newStream->usage)) {
1292 if (((int32_t)rotatedWidth ==
1293 gCamCapability[mCameraId]->active_array_size.width) &&
1294 ((int32_t)rotatedHeight ==
1295 gCamCapability[mCameraId]->active_array_size.height)) {
1296 sizeFound = true;
1297 break;
1298 }
 1299 /* We could potentially break here to enforce that a ZSL stream set by the
 1300 * framework is always the full active array size, but it is not clear from
 1301 * the spec whether the framework will always follow that. We also have
 1302 * logic to override to the full array size, so keep the check lenient
 1303 * for now.
 1304 */
1305 }
1306 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1307 MAX_SIZES_CNT);
1308 for (size_t i = 0; i < count; i++) {
1309 if (((int32_t)rotatedWidth ==
1310 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1311 ((int32_t)rotatedHeight ==
1312 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1313 sizeFound = true;
1314 break;
1315 }
1316 }
1317 break;
1318 } /* End of switch(newStream->format) */
1319
1320 /* We error out even if a single stream has unsupported size set */
1321 if (!sizeFound) {
1322 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1323 rotatedWidth, rotatedHeight, newStream->format,
1324 gCamCapability[mCameraId]->active_array_size.width,
1325 gCamCapability[mCameraId]->active_array_size.height);
1326 rc = -EINVAL;
1327 break;
1328 }
1329 } /* End of for each stream */
1330 return rc;
1331}
1332
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001333/*===========================================================================
1334 * FUNCTION : validateUsageFlags
1335 *
1334 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1337 *
1338 * PARAMETERS :
1339 * @stream_list : streams to be configured
1340 *
1341 * RETURN :
1342 * NO_ERROR if the usage flags are supported
1343 * error code if usage flags are not supported
1344 *
1345 *==========================================================================*/
1346int QCamera3HardwareInterface::validateUsageFlags(
1347 const camera3_stream_configuration_t* streamList)
1348{
1349 for (size_t j = 0; j < streamList->num_streams; j++) {
1350 const camera3_stream_t *newStream = streamList->streams[j];
1351
1352 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1353 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1354 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1355 continue;
1356 }
1357
Jason Leec4cf5032017-05-24 18:31:41 -07001358 // Here we only care whether it's EIS3 or not
1359 char is_type_value[PROPERTY_VALUE_MAX];
1360 property_get("persist.camera.is_type", is_type_value, "4");
1361 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1362 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1363 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1364 isType = IS_TYPE_NONE;
1365
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001366 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1367 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1368 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1369 bool forcePreviewUBWC = true;
1370 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1371 forcePreviewUBWC = false;
1372 }
1373 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001374 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001375 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001376 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001377 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001378 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001379
1380 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1381 // So color spaces will always match.
1382
1383 // Check whether underlying formats of shared streams match.
1384 if (isVideo && isPreview && videoFormat != previewFormat) {
1385 LOGE("Combined video and preview usage flag is not supported");
1386 return -EINVAL;
1387 }
1388 if (isPreview && isZSL && previewFormat != zslFormat) {
1389 LOGE("Combined preview and zsl usage flag is not supported");
1390 return -EINVAL;
1391 }
1392 if (isVideo && isZSL && videoFormat != zslFormat) {
1393 LOGE("Combined video and zsl usage flag is not supported");
1394 return -EINVAL;
1395 }
1396 }
1397 return NO_ERROR;
1398}
1399
1400/*===========================================================================
1401 * FUNCTION : validateUsageFlagsForEis
1402 *
1403 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1404 *
1405 * PARAMETERS :
1406 * @stream_list : streams to be configured
1407 *
1408 * RETURN :
1409 * NO_ERROR if the usage flags are supported
1410 * error code if usage flags are not supported
1411 *
1412 *==========================================================================*/
1413int QCamera3HardwareInterface::validateUsageFlagsForEis(
1414 const camera3_stream_configuration_t* streamList)
1415{
1416 for (size_t j = 0; j < streamList->num_streams; j++) {
1417 const camera3_stream_t *newStream = streamList->streams[j];
1418
1419 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1420 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1421
1422        // Because EIS is "hard-coded" for certain use cases, and the current
1423        // implementation doesn't support shared preview and video on the same
1424        // stream, return failure if EIS is forced on.
1425 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1426 LOGE("Combined video and preview usage flag is not supported due to EIS");
1427 return -EINVAL;
1428 }
1429 }
1430 return NO_ERROR;
1431}
1432
Thierry Strudel3d639192016-09-09 11:52:26 -07001433/*==============================================================================
1434 * FUNCTION : isSupportChannelNeeded
1435 *
1436 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1437 *
1438 * PARAMETERS :
1439 * @stream_list : streams to be configured
1440 * @stream_config_info : the config info for streams to be configured
1441 *
1442 * RETURN     : Boolean true/false decision
1443 *
1444 *==========================================================================*/
1445bool QCamera3HardwareInterface::isSupportChannelNeeded(
1446 camera3_stream_configuration_t *streamList,
1447 cam_stream_size_info_t stream_config_info)
1448{
1449 uint32_t i;
1450 bool pprocRequested = false;
1451    /* Check for conditions where the PProc pipeline does not have any streams */
1452 for (i = 0; i < stream_config_info.num_streams; i++) {
1453 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1454 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1455 pprocRequested = true;
1456 break;
1457 }
1458 }
1459
1460    if (pprocRequested == false)
1461 return true;
1462
1463 /* Dummy stream needed if only raw or jpeg streams present */
1464 for (i = 0; i < streamList->num_streams; i++) {
1465 switch(streamList->streams[i]->format) {
1466 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1467 case HAL_PIXEL_FORMAT_RAW10:
1468 case HAL_PIXEL_FORMAT_RAW16:
1469 case HAL_PIXEL_FORMAT_BLOB:
1470 break;
1471 default:
1472 return false;
1473 }
1474 }
1475 return true;
1476}
1477
1478/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001479 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001480 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 *
1483 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001484 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001485 *
1486 * RETURN : int32_t type of status
1487 * NO_ERROR -- success
1488 *              non-zero failure code
1489 *
1490 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001492{
1493 int32_t rc = NO_ERROR;
1494
1495 cam_dimension_t max_dim = {0, 0};
1496 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1497 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1498 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1499 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1500 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1501 }
1502
1503 clear_metadata_buffer(mParameters);
1504
1505 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1506 max_dim);
1507 if (rc != NO_ERROR) {
1508 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1509 return rc;
1510 }
1511
1512 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1513 if (rc != NO_ERROR) {
1514 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1515 return rc;
1516 }
1517
1518 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001519 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001520
1521 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1522 mParameters);
1523 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001524 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001525 return rc;
1526 }
1527
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001528 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001529 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1530 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1531 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1532 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1533 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001534
1535 return rc;
1536}
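// Illustrative usage of getSensorModeInfo() (hypothetical caller, sketched here only
// for documentation; it is not invoked from this spot):
//
//     cam_sensor_mode_info_t modeInfo = {};
//     if (getSensorModeInfo(modeInfo) == NO_ERROR) {
//         // e.g. modeInfo.op_pixel_clk and modeInfo.pixel_array_size could be used
//         // to estimate the frame durations achievable in the selected mode.
//     }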
1537
1538/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001539 * FUNCTION : getCurrentSensorModeInfo
1540 *
1541 * DESCRIPTION: Get sensor mode information that is currently selected.
1542 *
1543 * PARAMETERS :
1544 * @sensorModeInfo : sensor mode information (output)
1545 *
1546 * RETURN : int32_t type of status
1547 * NO_ERROR -- success
1548 *              non-zero failure code
1549 *
1550 *==========================================================================*/
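// Note: unlike getSensorModeInfo() above, which queries the sensor mode corresponding to
// the current stream configuration, this call reads back the sensor mode that is
// currently active.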
1551int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1552{
1553 int32_t rc = NO_ERROR;
1554
1555 clear_metadata_buffer(mParameters);
1556 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1557
1558 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1559 mParameters);
1560 if (rc != NO_ERROR) {
1561        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1562 return rc;
1563 }
1564
1565 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1566 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1567 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1568 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1569 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1570 sensorModeInfo.num_raw_bits);
1571
1572 return rc;
1573}
1574
1575/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001576 * FUNCTION : addToPPFeatureMask
1577 *
1578 * DESCRIPTION: add additional features to pp feature mask based on
1579 * stream type and usecase
1580 *
1581 * PARAMETERS :
1582 * @stream_format : stream type for feature mask
1583 * @stream_idx : stream idx within postprocess_mask list to change
1584 *
1585 * RETURN : NULL
1586 *
1587 *==========================================================================*/
1588void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1589 uint32_t stream_idx)
1590{
1591 char feature_mask_value[PROPERTY_VALUE_MAX];
1592 cam_feature_mask_t feature_mask;
1593 int args_converted;
1594 int property_len;
1595
1596 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001597#ifdef _LE_CAMERA_
1598 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1599 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1600 property_len = property_get("persist.camera.hal3.feature",
1601 feature_mask_value, swtnr_feature_mask_value);
1602#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001603 property_len = property_get("persist.camera.hal3.feature",
1604 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001605#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001606 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1607 (feature_mask_value[1] == 'x')) {
1608 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1609 } else {
1610 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1611 }
1612 if (1 != args_converted) {
1613 feature_mask = 0;
1614 LOGE("Wrong feature mask %s", feature_mask_value);
1615 return;
1616 }
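    // The property accepts either a hex string prefixed with "0x" or a plain decimal
    // string; e.g. (hypothetical values) "0x2000" and "8192" parse to the same
    // cam_feature_mask_t bits. Which bits map to which feature is defined by the
    // CAM_QCOM_FEATURE_* / CAM_QTI_FEATURE_* flags.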
1617
1618 switch (stream_format) {
1619 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1620        /* Add SW TNR or LLVD SeeMore to the pp feature mask only if video hint is enabled */
1621 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1622 mStreamConfigInfo.postprocess_mask[stream_idx]
1623 |= CAM_QTI_FEATURE_SW_TNR;
1624 LOGH("Added SW TNR to pp feature mask");
1625 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1626 mStreamConfigInfo.postprocess_mask[stream_idx]
1627 |= CAM_QCOM_FEATURE_LLVD;
1628 LOGH("Added LLVD SeeMore to pp feature mask");
1629 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001630 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1631 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1633 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001634 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1635 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1637 CAM_QTI_FEATURE_BINNING_CORRECTION;
1638 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001639 break;
1640 }
1641 default:
1642 break;
1643 }
1644 LOGD("PP feature mask %llx",
1645 mStreamConfigInfo.postprocess_mask[stream_idx]);
1646}
1647
1648/*==============================================================================
1649 * FUNCTION : updateFpsInPreviewBuffer
1650 *
1651 * DESCRIPTION: update FPS information in preview buffer.
1652 *
1653 * PARAMETERS :
1654 * @metadata : pointer to metadata buffer
1655 * @frame_number: frame_number to look for in pending buffer list
1656 *
1657 * RETURN : None
1658 *
1659 *==========================================================================*/
1660void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1661 uint32_t frame_number)
1662{
1663 // Mark all pending buffers for this particular request
1664 // with corresponding framerate information
1665 for (List<PendingBuffersInRequest>::iterator req =
1666 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1667 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1668 for(List<PendingBufferInfo>::iterator j =
1669 req->mPendingBufferList.begin();
1670 j != req->mPendingBufferList.end(); j++) {
1671 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1672 if ((req->frame_number == frame_number) &&
1673 (channel->getStreamTypeMask() &
1674 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1675 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1676 CAM_INTF_PARM_FPS_RANGE, metadata) {
1677 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1678 struct private_handle_t *priv_handle =
1679 (struct private_handle_t *)(*(j->buffer));
1680 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1681 }
1682 }
1683 }
1684 }
1685}
1686
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001687/*==============================================================================
1688 * FUNCTION : updateTimeStampInPendingBuffers
1689 *
1690 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1691 * of a frame number
1692 *
1693 * PARAMETERS :
1694 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1695 * @timestamp : timestamp to be set
1696 *
1697 * RETURN : None
1698 *
1699 *==========================================================================*/
1700void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1701 uint32_t frameNumber, nsecs_t timestamp)
1702{
1703 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1704 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1705 if (req->frame_number != frameNumber)
1706 continue;
1707
1708 for (auto k = req->mPendingBufferList.begin();
1709 k != req->mPendingBufferList.end(); k++ ) {
1710 struct private_handle_t *priv_handle =
1711 (struct private_handle_t *) (*(k->buffer));
1712 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1713 }
1714 }
1715 return;
1716}
1717
Thierry Strudel3d639192016-09-09 11:52:26 -07001718/*===========================================================================
1719 * FUNCTION : configureStreams
1720 *
1721 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1722 * and output streams.
1723 *
1724 * PARAMETERS :
1725 * @stream_list : streams to be configured
1726 *
1727 * RETURN :
1728 *
1729 *==========================================================================*/
1730int QCamera3HardwareInterface::configureStreams(
1731 camera3_stream_configuration_t *streamList)
1732{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001733 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001734 int rc = 0;
1735
1736 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001737 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001738 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001739 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001740
1741 return rc;
1742}
1743
1744/*===========================================================================
1745 * FUNCTION : configureStreamsPerfLocked
1746 *
1747 * DESCRIPTION: configureStreams while perfLock is held.
1748 *
1749 * PARAMETERS :
1750 * @stream_list : streams to be configured
1751 *
1752 * RETURN : int32_t type of status
1753 * NO_ERROR -- success
1754 *              non-zero failure code
1755 *==========================================================================*/
1756int QCamera3HardwareInterface::configureStreamsPerfLocked(
1757 camera3_stream_configuration_t *streamList)
1758{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001759 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001760 int rc = 0;
1761
1762 // Sanity check stream_list
1763 if (streamList == NULL) {
1764 LOGE("NULL stream configuration");
1765 return BAD_VALUE;
1766 }
1767 if (streamList->streams == NULL) {
1768 LOGE("NULL stream list");
1769 return BAD_VALUE;
1770 }
1771
1772 if (streamList->num_streams < 1) {
1773 LOGE("Bad number of streams requested: %d",
1774 streamList->num_streams);
1775 return BAD_VALUE;
1776 }
1777
1778 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1779 LOGE("Maximum number of streams %d exceeded: %d",
1780 MAX_NUM_STREAMS, streamList->num_streams);
1781 return BAD_VALUE;
1782 }
1783
Jason Leec4cf5032017-05-24 18:31:41 -07001784 mOpMode = streamList->operation_mode;
1785 LOGD("mOpMode: %d", mOpMode);
1786
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001787 rc = validateUsageFlags(streamList);
1788 if (rc != NO_ERROR) {
1789 return rc;
1790 }
1791
Thierry Strudel3d639192016-09-09 11:52:26 -07001792    /* First invalidate all the streams in mStreamInfo;
1793     * if they appear again, they will be validated */
1794 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1795 it != mStreamInfo.end(); it++) {
1796 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1797 if (channel) {
1798 channel->stop();
1799 }
1800 (*it)->status = INVALID;
1801 }
1802
1803 if (mRawDumpChannel) {
1804 mRawDumpChannel->stop();
1805 delete mRawDumpChannel;
1806 mRawDumpChannel = NULL;
1807 }
1808
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001809 if (mHdrPlusRawSrcChannel) {
1810 mHdrPlusRawSrcChannel->stop();
1811 delete mHdrPlusRawSrcChannel;
1812 mHdrPlusRawSrcChannel = NULL;
1813 }
1814
Thierry Strudel3d639192016-09-09 11:52:26 -07001815 if (mSupportChannel)
1816 mSupportChannel->stop();
1817
1818 if (mAnalysisChannel) {
1819 mAnalysisChannel->stop();
1820 }
1821 if (mMetadataChannel) {
1822        /* If mStreamInfo is not empty, a metadata stream exists */
1823 mMetadataChannel->stop();
1824 }
1825 if (mChannelHandle) {
1826 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001827 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001828 LOGD("stopping channel %d", mChannelHandle);
1829 }
1830
1831 pthread_mutex_lock(&mMutex);
1832
1833 // Check state
1834 switch (mState) {
1835 case INITIALIZED:
1836 case CONFIGURED:
1837 case STARTED:
1838 /* valid state */
1839 break;
1840 default:
1841 LOGE("Invalid state %d", mState);
1842 pthread_mutex_unlock(&mMutex);
1843 return -ENODEV;
1844 }
1845
1846 /* Check whether we have video stream */
1847 m_bIs4KVideo = false;
1848 m_bIsVideo = false;
1849 m_bEisSupportedSize = false;
1850 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001851 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001852 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001853 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001854 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001855 uint32_t videoWidth = 0U;
1856 uint32_t videoHeight = 0U;
1857 size_t rawStreamCnt = 0;
1858 size_t stallStreamCnt = 0;
1859 size_t processedStreamCnt = 0;
1860 // Number of streams on ISP encoder path
1861 size_t numStreamsOnEncoder = 0;
1862 size_t numYuv888OnEncoder = 0;
1863 bool bYuv888OverrideJpeg = false;
1864 cam_dimension_t largeYuv888Size = {0, 0};
1865 cam_dimension_t maxViewfinderSize = {0, 0};
1866 bool bJpegExceeds4K = false;
1867 bool bJpegOnEncoder = false;
1868 bool bUseCommonFeatureMask = false;
1869 cam_feature_mask_t commonFeatureMask = 0;
1870 bool bSmallJpegSize = false;
1871 uint32_t width_ratio;
1872 uint32_t height_ratio;
1873 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1874 camera3_stream_t *inputStream = NULL;
1875 bool isJpeg = false;
1876 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001877 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001878 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001879
1880 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1881
1882 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001883 uint8_t eis_prop_set;
1884 uint32_t maxEisWidth = 0;
1885 uint32_t maxEisHeight = 0;
1886
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001887 // Initialize all instant AEC related variables
1888 mInstantAEC = false;
1889 mResetInstantAEC = false;
1890 mInstantAECSettledFrameNumber = 0;
1891 mAecSkipDisplayFrameBound = 0;
1892 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001893 mCurrFeatureState = 0;
1894 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001895
Thierry Strudel3d639192016-09-09 11:52:26 -07001896 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1897
1898 size_t count = IS_TYPE_MAX;
1899 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1900 for (size_t i = 0; i < count; i++) {
1901 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001902 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1903 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001904 break;
1905 }
1906 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001907
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001908 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001909 maxEisWidth = MAX_EIS_WIDTH;
1910 maxEisHeight = MAX_EIS_HEIGHT;
1911 }
1912
1913 /* EIS setprop control */
1914 char eis_prop[PROPERTY_VALUE_MAX];
1915 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001916 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001917 eis_prop_set = (uint8_t)atoi(eis_prop);
1918
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001919 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001920 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1921
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001922 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1923 m_bEisEnable, eis_prop_set, m_bEisSupported);
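    // Note: m_bEisEnable can still be cleared further below, e.g. for front / front-aux
    // cameras or when no video stream is present in this configuration.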
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001924
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 /* stream configurations */
1926 for (size_t i = 0; i < streamList->num_streams; i++) {
1927 camera3_stream_t *newStream = streamList->streams[i];
1928 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1929 "height = %d, rotation = %d, usage = 0x%x",
1930 i, newStream->stream_type, newStream->format,
1931 newStream->width, newStream->height, newStream->rotation,
1932 newStream->usage);
1933 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1934 newStream->stream_type == CAMERA3_STREAM_INPUT){
1935 isZsl = true;
1936 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001937 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1938 IS_USAGE_PREVIEW(newStream->usage)) {
1939 isPreview = true;
1940 }
1941
Thierry Strudel3d639192016-09-09 11:52:26 -07001942 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1943 inputStream = newStream;
1944 }
1945
Emilian Peev7650c122017-01-19 08:24:33 -08001946 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1947 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001948 isJpeg = true;
1949 jpegSize.width = newStream->width;
1950 jpegSize.height = newStream->height;
1951 if (newStream->width > VIDEO_4K_WIDTH ||
1952 newStream->height > VIDEO_4K_HEIGHT)
1953 bJpegExceeds4K = true;
1954 }
1955
1956 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1957 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1958 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001959 // In HAL3 we can have multiple different video streams.
1960 // The variables video width and height are used below as
1961 // dimensions of the biggest of them
1962 if (videoWidth < newStream->width ||
1963 videoHeight < newStream->height) {
1964 videoWidth = newStream->width;
1965 videoHeight = newStream->height;
1966 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001967 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1968 (VIDEO_4K_HEIGHT <= newStream->height)) {
1969 m_bIs4KVideo = true;
1970 }
1971 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1972 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001973
Thierry Strudel3d639192016-09-09 11:52:26 -07001974 }
1975 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1976 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1977 switch (newStream->format) {
1978 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001979 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1980 depthPresent = true;
1981 break;
1982 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001983 stallStreamCnt++;
1984 if (isOnEncoder(maxViewfinderSize, newStream->width,
1985 newStream->height)) {
1986 numStreamsOnEncoder++;
1987 bJpegOnEncoder = true;
1988 }
1989 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1990 newStream->width);
1991 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1992                    newStream->height);
1993 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1994 "FATAL: max_downscale_factor cannot be zero and so assert");
1995 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1996 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1997 LOGH("Setting small jpeg size flag to true");
1998 bSmallJpegSize = true;
1999 }
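                // Worked example (hypothetical numbers): with a 4032x3024 active array and a
                // 640x480 JPEG stream, width_ratio = CEIL_DIVISION(4032, 640) = 7; if
                // max_downscale_factor were 4, the limit is exceeded, bSmallJpegSize is set,
                // and the BLOB case below applies the CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 mask
                // (when not in ZSL).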
2000 break;
2001 case HAL_PIXEL_FORMAT_RAW10:
2002 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2003 case HAL_PIXEL_FORMAT_RAW16:
2004 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002005 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2006 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2007 pdStatCount++;
2008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002009 break;
2010 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2011 processedStreamCnt++;
2012 if (isOnEncoder(maxViewfinderSize, newStream->width,
2013 newStream->height)) {
2014 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2015 !IS_USAGE_ZSL(newStream->usage)) {
2016 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2017 }
2018 numStreamsOnEncoder++;
2019 }
2020 break;
2021 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2022 processedStreamCnt++;
2023 if (isOnEncoder(maxViewfinderSize, newStream->width,
2024 newStream->height)) {
2025                    // If the Yuv888 size is not greater than 4K, set the feature mask
2026                    // to SUPERSET so that it supports concurrent requests on
2027                    // YUV and JPEG.
2028 if (newStream->width <= VIDEO_4K_WIDTH &&
2029 newStream->height <= VIDEO_4K_HEIGHT) {
2030 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2031 }
2032 numStreamsOnEncoder++;
2033 numYuv888OnEncoder++;
2034 largeYuv888Size.width = newStream->width;
2035 largeYuv888Size.height = newStream->height;
2036 }
2037 break;
2038 default:
2039 processedStreamCnt++;
2040 if (isOnEncoder(maxViewfinderSize, newStream->width,
2041 newStream->height)) {
2042 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2043 numStreamsOnEncoder++;
2044 }
2045 break;
2046 }
2047
2048 }
2049 }
2050
2051 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2052 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2053 !m_bIsVideo) {
2054 m_bEisEnable = false;
2055 }
2056
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002057 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2058 pthread_mutex_unlock(&mMutex);
2059 return -EINVAL;
2060 }
2061
Thierry Strudel54dc9782017-02-15 12:12:10 -08002062 uint8_t forceEnableTnr = 0;
2063 char tnr_prop[PROPERTY_VALUE_MAX];
2064 memset(tnr_prop, 0, sizeof(tnr_prop));
2065 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2066 forceEnableTnr = (uint8_t)atoi(tnr_prop);
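    // e.g. "adb shell setprop debug.camera.tnr.forceenable 1" forces TNR on for this
    // configuration regardless of the checks below; any non-zero value is treated as enabled.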
2067
Thierry Strudel3d639192016-09-09 11:52:26 -07002068 /* Logic to enable/disable TNR based on specific config size/etc.*/
2069 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002070 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2071 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002072 else if (forceEnableTnr)
2073 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002074
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002075 char videoHdrProp[PROPERTY_VALUE_MAX];
2076 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2077 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2078 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2079
2080 if (hdr_mode_prop == 1 && m_bIsVideo &&
2081 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2082 m_bVideoHdrEnabled = true;
2083 else
2084 m_bVideoHdrEnabled = false;
2085
2086
Thierry Strudel3d639192016-09-09 11:52:26 -07002087 /* Check if num_streams is sane */
2088 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2089 rawStreamCnt > MAX_RAW_STREAMS ||
2090 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2091        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2092 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2093 pthread_mutex_unlock(&mMutex);
2094 return -EINVAL;
2095 }
2096 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002097 if (isZsl && m_bIs4KVideo) {
2098 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 pthread_mutex_unlock(&mMutex);
2100 return -EINVAL;
2101 }
2102 /* Check if stream sizes are sane */
2103 if (numStreamsOnEncoder > 2) {
2104 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2105 pthread_mutex_unlock(&mMutex);
2106 return -EINVAL;
2107 } else if (1 < numStreamsOnEncoder){
2108 bUseCommonFeatureMask = true;
2109 LOGH("Multiple streams above max viewfinder size, common mask needed");
2110 }
2111
2112 /* Check if BLOB size is greater than 4k in 4k recording case */
2113 if (m_bIs4KVideo && bJpegExceeds4K) {
2114 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2115 pthread_mutex_unlock(&mMutex);
2116 return -EINVAL;
2117 }
2118
Emilian Peev7650c122017-01-19 08:24:33 -08002119 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2120 depthPresent) {
2121 LOGE("HAL doesn't support depth streams in HFR mode!");
2122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125
Thierry Strudel3d639192016-09-09 11:52:26 -07002126 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2127 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2128 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2129 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2130 // configurations:
2131 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2132 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2133 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2134 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2135 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2136 __func__);
2137 pthread_mutex_unlock(&mMutex);
2138 return -EINVAL;
2139 }
2140
2141 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2142    // the YUV stream's size is greater than the JPEG size, set common
2143 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2144 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2145 jpegSize.width, jpegSize.height) &&
2146 largeYuv888Size.width > jpegSize.width &&
2147 largeYuv888Size.height > jpegSize.height) {
2148 bYuv888OverrideJpeg = true;
2149 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2150 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2151 }
2152
2153 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2154 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2155 commonFeatureMask);
2156 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2157 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2158
2159 rc = validateStreamDimensions(streamList);
2160 if (rc == NO_ERROR) {
2161 rc = validateStreamRotations(streamList);
2162 }
2163 if (rc != NO_ERROR) {
2164 LOGE("Invalid stream configuration requested!");
2165 pthread_mutex_unlock(&mMutex);
2166 return rc;
2167 }
2168
Emilian Peev0f3c3162017-03-15 12:57:46 +00002169 if (1 < pdStatCount) {
2170 LOGE("HAL doesn't support multiple PD streams");
2171 pthread_mutex_unlock(&mMutex);
2172 return -EINVAL;
2173 }
2174
2175 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2176 (1 == pdStatCount)) {
2177 LOGE("HAL doesn't support PD streams in HFR mode!");
2178 pthread_mutex_unlock(&mMutex);
2179 return -EINVAL;
2180 }
2181
Thierry Strudel3d639192016-09-09 11:52:26 -07002182 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2183 for (size_t i = 0; i < streamList->num_streams; i++) {
2184 camera3_stream_t *newStream = streamList->streams[i];
2185 LOGH("newStream type = %d, stream format = %d "
2186 "stream size : %d x %d, stream rotation = %d",
2187 newStream->stream_type, newStream->format,
2188 newStream->width, newStream->height, newStream->rotation);
2189 //if the stream is in the mStreamList validate it
2190        // If the stream is already in mStreamInfo, validate it
2191 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2192 it != mStreamInfo.end(); it++) {
2193 if ((*it)->stream == newStream) {
2194 QCamera3ProcessingChannel *channel =
2195 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2196 stream_exists = true;
2197 if (channel)
2198 delete channel;
2199 (*it)->status = VALID;
2200 (*it)->stream->priv = NULL;
2201 (*it)->channel = NULL;
2202 }
2203 }
2204 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2205 //new stream
2206 stream_info_t* stream_info;
2207 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2208 if (!stream_info) {
2209 LOGE("Could not allocate stream info");
2210 rc = -ENOMEM;
2211 pthread_mutex_unlock(&mMutex);
2212 return rc;
2213 }
2214 stream_info->stream = newStream;
2215 stream_info->status = VALID;
2216 stream_info->channel = NULL;
2217 mStreamInfo.push_back(stream_info);
2218 }
2219 /* Covers Opaque ZSL and API1 F/W ZSL */
2220 if (IS_USAGE_ZSL(newStream->usage)
2221 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2222 if (zslStream != NULL) {
2223 LOGE("Multiple input/reprocess streams requested!");
2224 pthread_mutex_unlock(&mMutex);
2225 return BAD_VALUE;
2226 }
2227 zslStream = newStream;
2228 }
2229 /* Covers YUV reprocess */
2230 if (inputStream != NULL) {
2231 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2232 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2233 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2234 && inputStream->width == newStream->width
2235 && inputStream->height == newStream->height) {
2236 if (zslStream != NULL) {
2237                    /* This scenario indicates that multiple YUV streams with the same
2238                     * size as the input stream have been requested. Since the zsl stream
2239                     * handle is used solely to override the size of streams that share
2240                     * h/w streams, we just make a guess here as to which of the streams
2241                     * is the ZSL stream. This will be refactored once we have generic
2242                     * logic for streams sharing encoder output.
2243                     */
2244 LOGH("Warning, Multiple ip/reprocess streams requested!");
2245 }
2246 zslStream = newStream;
2247 }
2248 }
2249 }
2250
2251 /* If a zsl stream is set, we know that we have configured at least one input or
2252 bidirectional stream */
2253 if (NULL != zslStream) {
2254 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2255 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2256 mInputStreamInfo.format = zslStream->format;
2257 mInputStreamInfo.usage = zslStream->usage;
2258 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2259 mInputStreamInfo.dim.width,
2260 mInputStreamInfo.dim.height,
2261 mInputStreamInfo.format, mInputStreamInfo.usage);
2262 }
2263
2264 cleanAndSortStreamInfo();
2265 if (mMetadataChannel) {
2266 delete mMetadataChannel;
2267 mMetadataChannel = NULL;
2268 }
2269 if (mSupportChannel) {
2270 delete mSupportChannel;
2271 mSupportChannel = NULL;
2272 }
2273
2274 if (mAnalysisChannel) {
2275 delete mAnalysisChannel;
2276 mAnalysisChannel = NULL;
2277 }
2278
2279 if (mDummyBatchChannel) {
2280 delete mDummyBatchChannel;
2281 mDummyBatchChannel = NULL;
2282 }
2283
Emilian Peev7650c122017-01-19 08:24:33 -08002284 if (mDepthChannel) {
2285 mDepthChannel = NULL;
2286 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002287 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002288
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002289 mShutterDispatcher.clear();
2290 mOutputBufferDispatcher.clear();
2291
Thierry Strudel2896d122017-02-23 19:18:03 -08002292 char is_type_value[PROPERTY_VALUE_MAX];
2293 property_get("persist.camera.is_type", is_type_value, "4");
2294 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
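    // persist.camera.is_type selects the image stabilization type; the value is compared
    // against the cam_is_type_t enum, so the EIS 3.0 handling below is enabled only when the
    // property's numeric value equals IS_TYPE_EIS_3_0 (the default "4" is assumed to map to it).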
2295
Binhao Line406f062017-05-03 14:39:44 -07002296 char property_value[PROPERTY_VALUE_MAX];
2297 property_get("persist.camera.gzoom.at", property_value, "0");
2298 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002299 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2300 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2301 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2302 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002303
2304 property_get("persist.camera.gzoom.4k", property_value, "0");
2305 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
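    // persist.camera.gzoom.at is treated as a bit mask: bit 0 enables Google zoom on the
    // video stream and bit 1 on the preview stream (back camera only), e.g. a value of 3
    // enables both. persist.camera.gzoom.4k additionally gates it for 4K video.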
2306
Thierry Strudel3d639192016-09-09 11:52:26 -07002307 //Create metadata channel and initialize it
2308 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2309 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2310 gCamCapability[mCameraId]->color_arrangement);
2311 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2312 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002313 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 if (mMetadataChannel == NULL) {
2315 LOGE("failed to allocate metadata channel");
2316 rc = -ENOMEM;
2317 pthread_mutex_unlock(&mMutex);
2318 return rc;
2319 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002320 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002321 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2322 if (rc < 0) {
2323 LOGE("metadata channel initialization failed");
2324 delete mMetadataChannel;
2325 mMetadataChannel = NULL;
2326 pthread_mutex_unlock(&mMutex);
2327 return rc;
2328 }
2329
Thierry Strudel2896d122017-02-23 19:18:03 -08002330 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002331 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002332 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002333    // Keep track of preview/video stream indices.
2334 // There could be more than one preview streams, but only one video stream.
2335 int32_t video_stream_idx = -1;
2336 int32_t preview_stream_idx[streamList->num_streams];
2337 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002338 bool previewTnr[streamList->num_streams];
2339 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2340 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2341 // Loop through once to determine preview TNR conditions before creating channels.
2342 for (size_t i = 0; i < streamList->num_streams; i++) {
2343 camera3_stream_t *newStream = streamList->streams[i];
2344 uint32_t stream_usage = newStream->usage;
2345 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2346 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2347 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2348 video_stream_idx = (int32_t)i;
2349 else
2350 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2351 }
2352 }
2353 // By default, preview stream TNR is disabled.
2354 // Enable TNR to the preview stream if all conditions below are satisfied:
2355 // 1. preview resolution == video resolution.
2356 // 2. video stream TNR is enabled.
2357 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2358 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2359 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2360 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2361 if (m_bTnrEnabled && m_bTnrVideo &&
2362 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2363 video_stream->width == preview_stream->width &&
2364 video_stream->height == preview_stream->height) {
2365 previewTnr[preview_stream_idx[i]] = true;
2366 }
2367 }
2368
Thierry Strudel3d639192016-09-09 11:52:26 -07002369 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2370 /* Allocate channel objects for the requested streams */
2371 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002372
Thierry Strudel3d639192016-09-09 11:52:26 -07002373 camera3_stream_t *newStream = streamList->streams[i];
2374 uint32_t stream_usage = newStream->usage;
2375 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2376 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2377 struct camera_info *p_info = NULL;
2378 pthread_mutex_lock(&gCamLock);
2379 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2380 pthread_mutex_unlock(&gCamLock);
2381 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2382 || IS_USAGE_ZSL(newStream->usage)) &&
2383 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002384 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002385 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002386 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2387 if (bUseCommonFeatureMask)
2388 zsl_ppmask = commonFeatureMask;
2389 else
2390 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002392 if (numStreamsOnEncoder > 0)
2393 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2394 else
2395 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002396 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002397 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002399 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 LOGH("Input stream configured, reprocess config");
2401 } else {
2402 //for non zsl streams find out the format
2403 switch (newStream->format) {
2404 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2405 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002406 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002407 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2408 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2409 /* add additional features to pp feature mask */
2410 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2411 mStreamConfigInfo.num_streams);
2412
2413 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2414 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2415 CAM_STREAM_TYPE_VIDEO;
2416 if (m_bTnrEnabled && m_bTnrVideo) {
2417 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2418 CAM_QCOM_FEATURE_CPP_TNR;
2419 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2420 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2421 ~CAM_QCOM_FEATURE_CDS;
2422 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002423 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2424 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2425 CAM_QTI_FEATURE_PPEISCORE;
2426 }
Binhao Line406f062017-05-03 14:39:44 -07002427 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2428 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2429 CAM_QCOM_FEATURE_GOOG_ZOOM;
2430 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002431 } else {
2432 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2433 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002434 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002435 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2436 CAM_QCOM_FEATURE_CPP_TNR;
2437 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2438 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2439 ~CAM_QCOM_FEATURE_CDS;
2440 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002441 if(!m_bSwTnrPreview) {
2442 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2443 ~CAM_QTI_FEATURE_SW_TNR;
2444 }
Binhao Line406f062017-05-03 14:39:44 -07002445 if (is_goog_zoom_preview_enabled) {
2446 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2447 CAM_QCOM_FEATURE_GOOG_ZOOM;
2448 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002449 padding_info.width_padding = mSurfaceStridePadding;
2450 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002451 previewSize.width = (int32_t)newStream->width;
2452 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002453 }
2454 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2455 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2456 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2457 newStream->height;
2458 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2459 newStream->width;
2460 }
2461 }
2462 break;
2463 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002464 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002465 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2466 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2467 if (bUseCommonFeatureMask)
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2469 commonFeatureMask;
2470 else
2471 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2472 CAM_QCOM_FEATURE_NONE;
2473 } else {
2474 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2475 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2476 }
2477 break;
2478 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002479 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002480 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2481 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2482 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2483 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2484 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002485 /* Remove rotation if it is not supported
2486 for 4K LiveVideo snapshot case (online processing) */
2487 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2488 CAM_QCOM_FEATURE_ROTATION)) {
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2490 &= ~CAM_QCOM_FEATURE_ROTATION;
2491 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002492 } else {
2493 if (bUseCommonFeatureMask &&
2494 isOnEncoder(maxViewfinderSize, newStream->width,
2495 newStream->height)) {
2496 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2497 } else {
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2499 }
2500 }
2501 if (isZsl) {
2502 if (zslStream) {
2503 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2504 (int32_t)zslStream->width;
2505 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2506 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2508 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002509 } else {
2510 LOGE("Error, No ZSL stream identified");
2511 pthread_mutex_unlock(&mMutex);
2512 return -EINVAL;
2513 }
2514 } else if (m_bIs4KVideo) {
2515 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2516 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2517 } else if (bYuv888OverrideJpeg) {
2518 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2519 (int32_t)largeYuv888Size.width;
2520 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2521 (int32_t)largeYuv888Size.height;
2522 }
2523 break;
2524 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2525 case HAL_PIXEL_FORMAT_RAW16:
2526 case HAL_PIXEL_FORMAT_RAW10:
2527 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002530 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2531 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2532 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2533 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2534 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2535 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2536 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2537 gCamCapability[mCameraId]->dt[mPDIndex];
2538 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2539 gCamCapability[mCameraId]->vc[mPDIndex];
2540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002541 break;
2542 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002543 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002544 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2545 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2546 break;
2547 }
2548 }
2549
2550 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2551 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2552 gCamCapability[mCameraId]->color_arrangement);
2553
2554 if (newStream->priv == NULL) {
2555 //New stream, construct channel
2556 switch (newStream->stream_type) {
2557 case CAMERA3_STREAM_INPUT:
2558 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2559 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2560 break;
2561 case CAMERA3_STREAM_BIDIRECTIONAL:
2562 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2563 GRALLOC_USAGE_HW_CAMERA_WRITE;
2564 break;
2565 case CAMERA3_STREAM_OUTPUT:
2566                    /* For a video encoding stream, set the read/write rarely
2567                     * flags so that the buffers may be allocated un-cached */
2568 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2569 newStream->usage |=
2570 (GRALLOC_USAGE_SW_READ_RARELY |
2571 GRALLOC_USAGE_SW_WRITE_RARELY |
2572 GRALLOC_USAGE_HW_CAMERA_WRITE);
2573 else if (IS_USAGE_ZSL(newStream->usage))
2574 {
2575 LOGD("ZSL usage flag skipping");
2576 }
2577 else if (newStream == zslStream
2578 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2579 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2580 } else
2581 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2582 break;
2583 default:
2584 LOGE("Invalid stream_type %d", newStream->stream_type);
2585 break;
2586 }
2587
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002588 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002589 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2590 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2591 QCamera3ProcessingChannel *channel = NULL;
2592 switch (newStream->format) {
2593 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2594 if ((newStream->usage &
2595 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2596 (streamList->operation_mode ==
2597 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2598 ) {
2599 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2600 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002601 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002602 this,
2603 newStream,
2604 (cam_stream_type_t)
2605 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2606 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2607 mMetadataChannel,
2608 0); //heap buffers are not required for HFR video channel
2609 if (channel == NULL) {
2610 LOGE("allocation of channel failed");
2611 pthread_mutex_unlock(&mMutex);
2612 return -ENOMEM;
2613 }
2614 //channel->getNumBuffers() will return 0 here so use
2615                        //MAX_INFLIGHT_HFR_REQUESTS
2616 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2617 newStream->priv = channel;
2618 LOGI("num video buffers in HFR mode: %d",
2619 MAX_INFLIGHT_HFR_REQUESTS);
2620 } else {
2621                        /* Copy stream contents in the HFR preview-only case to create a
2622                         * dummy batch channel so that sensor streaming is in
2623                         * HFR mode */
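                        /* Note: the copied mDummyBatchStream is consumed later in this function,
                         * when mDummyBatchChannel is created for the preview-only HFR case. */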
2624 if (!m_bIsVideo && (streamList->operation_mode ==
2625 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2626 mDummyBatchStream = *newStream;
2627 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002628 int bufferCount = MAX_INFLIGHT_REQUESTS;
2629 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2630 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002631 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2632 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2633 bufferCount = m_bIs4KVideo ?
2634 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2635 }
2636
Thierry Strudel2896d122017-02-23 19:18:03 -08002637 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002638 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2639 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002640 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002641 this,
2642 newStream,
2643 (cam_stream_type_t)
2644 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2645 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2646 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002647 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002648 if (channel == NULL) {
2649 LOGE("allocation of channel failed");
2650 pthread_mutex_unlock(&mMutex);
2651 return -ENOMEM;
2652 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002653 /* disable UBWC for preview, though supported,
2654 * to take advantage of CPP duplication */
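                    /* That is: keep UBWC on preview unless a video stream with matching
                     * dimensions exists and video UBWC is off, in which case a single
                     * non-UBWC output can be duplicated by CPP for both streams. */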
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002655 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002656 (previewSize.width == (int32_t)videoWidth)&&
2657 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002658 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002659 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002660 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002661 /* When goog_zoom is linked to the preview or video stream,
2662                     * disable UBWC for the linked stream */
2663 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2664 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2665 channel->setUBWCEnabled(false);
2666 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002667 newStream->max_buffers = channel->getNumBuffers();
2668 newStream->priv = channel;
2669 }
2670 break;
2671 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2672 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2673 mChannelHandle,
2674 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002675 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002676 this,
2677 newStream,
2678 (cam_stream_type_t)
2679 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2680 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2681 mMetadataChannel);
2682 if (channel == NULL) {
2683 LOGE("allocation of YUV channel failed");
2684 pthread_mutex_unlock(&mMutex);
2685 return -ENOMEM;
2686 }
2687 newStream->max_buffers = channel->getNumBuffers();
2688 newStream->priv = channel;
2689 break;
2690 }
2691 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2692 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002693 case HAL_PIXEL_FORMAT_RAW10: {
2694 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2695 (HAL_DATASPACE_DEPTH != newStream->data_space))
2696 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 mRawChannel = new QCamera3RawChannel(
2698 mCameraHandle->camera_handle, mChannelHandle,
2699 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002700 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002701 this, newStream,
2702 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002703 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002704 if (mRawChannel == NULL) {
2705 LOGE("allocation of raw channel failed");
2706 pthread_mutex_unlock(&mMutex);
2707 return -ENOMEM;
2708 }
2709 newStream->max_buffers = mRawChannel->getNumBuffers();
2710 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2711 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002712 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002714 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2715 mDepthChannel = new QCamera3DepthChannel(
2716 mCameraHandle->camera_handle, mChannelHandle,
2717 mCameraHandle->ops, NULL, NULL, &padding_info,
2718 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2719 mMetadataChannel);
2720 if (NULL == mDepthChannel) {
2721 LOGE("Allocation of depth channel failed");
2722 pthread_mutex_unlock(&mMutex);
2723 return NO_MEMORY;
2724 }
2725 newStream->priv = mDepthChannel;
2726 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2727 } else {
2728 // Max live snapshot inflight buffer is 1. This is to mitigate
2729 // frame drop issues for video snapshot. The more buffers being
2730 // allocated, the more frame drops there are.
2731 mPictureChannel = new QCamera3PicChannel(
2732 mCameraHandle->camera_handle, mChannelHandle,
2733 mCameraHandle->ops, captureResultCb,
2734 setBufferErrorStatus, &padding_info, this, newStream,
2735 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2736 m_bIs4KVideo, isZsl, mMetadataChannel,
2737 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2738 if (mPictureChannel == NULL) {
2739 LOGE("allocation of channel failed");
2740 pthread_mutex_unlock(&mMutex);
2741 return -ENOMEM;
2742 }
2743 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2744 newStream->max_buffers = mPictureChannel->getNumBuffers();
2745 mPictureChannel->overrideYuvSize(
2746 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2747 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002748 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002749 break;
2750
2751 default:
2752 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002753 pthread_mutex_unlock(&mMutex);
2754 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002755 }
2756 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2757 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2758 } else {
2759 LOGE("Error, Unknown stream type");
2760 pthread_mutex_unlock(&mMutex);
2761 return -EINVAL;
2762 }
2763
2764 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002765 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002766 // Here we only care whether it's EIS3 or not
2767 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2768 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2769 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2770 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002771 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002772 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002773 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002774 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2775 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2776 }
2777 }
2778
2779 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2780 it != mStreamInfo.end(); it++) {
2781 if ((*it)->stream == newStream) {
2782 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2783 break;
2784 }
2785 }
2786 } else {
2787 // Channel already exists for this stream
2788 // Do nothing for now
2789 }
2790 padding_info = gCamCapability[mCameraId]->padding_info;
2791
Emilian Peev7650c122017-01-19 08:24:33 -08002792 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002793 * since there is no real stream associated with it
2794 */
Emilian Peev7650c122017-01-19 08:24:33 -08002795 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002796 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2797 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002798 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002799 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002800 }
2801
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002802 // Let buffer dispatcher know the configured streams.
2803 mOutputBufferDispatcher.configureStreams(streamList);
2804
Thierry Strudel2896d122017-02-23 19:18:03 -08002805 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2806 onlyRaw = false;
2807 }
2808
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002809 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002810 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002811 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002812 cam_analysis_info_t analysisInfo;
2813 int32_t ret = NO_ERROR;
2814 ret = mCommon.getAnalysisInfo(
2815 FALSE,
2816 analysisFeatureMask,
2817 &analysisInfo);
2818 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002819 cam_color_filter_arrangement_t analysis_color_arrangement =
2820 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2821 CAM_FILTER_ARRANGEMENT_Y :
2822 gCamCapability[mCameraId]->color_arrangement);
2823 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2824 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002825 cam_dimension_t analysisDim;
2826 analysisDim = mCommon.getMatchingDimension(previewSize,
2827 analysisInfo.analysis_recommended_res);
2828
2829 mAnalysisChannel = new QCamera3SupportChannel(
2830 mCameraHandle->camera_handle,
2831 mChannelHandle,
2832 mCameraHandle->ops,
2833 &analysisInfo.analysis_padding_info,
2834 analysisFeatureMask,
2835 CAM_STREAM_TYPE_ANALYSIS,
2836 &analysisDim,
2837 (analysisInfo.analysis_format
2838 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2839 : CAM_FORMAT_YUV_420_NV21),
2840 analysisInfo.hw_analysis_supported,
2841 gCamCapability[mCameraId]->color_arrangement,
2842 this,
2843 0); // force buffer count to 0
2844 } else {
2845 LOGW("getAnalysisInfo failed, ret = %d", ret);
2846 }
2847 if (!mAnalysisChannel) {
2848 LOGW("Analysis channel cannot be created");
2849 }
2850 }
2851
Thierry Strudel3d639192016-09-09 11:52:26 -07002852 //RAW DUMP channel
2853 if (mEnableRawDump && isRawStreamRequested == false){
2854 cam_dimension_t rawDumpSize;
2855 rawDumpSize = getMaxRawSize(mCameraId);
2856 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2857 setPAAFSupport(rawDumpFeatureMask,
2858 CAM_STREAM_TYPE_RAW,
2859 gCamCapability[mCameraId]->color_arrangement);
2860 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 rawDumpSize,
2864 &padding_info,
2865 this, rawDumpFeatureMask);
2866 if (!mRawDumpChannel) {
2867 LOGE("Raw Dump channel cannot be created");
2868 pthread_mutex_unlock(&mMutex);
2869 return -ENOMEM;
2870 }
2871 }
2872
Thierry Strudel3d639192016-09-09 11:52:26 -07002873 if (mAnalysisChannel) {
2874 cam_analysis_info_t analysisInfo;
2875 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2876 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2877 CAM_STREAM_TYPE_ANALYSIS;
2878 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2879 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002880 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002881 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2882 &analysisInfo);
2883 if (rc != NO_ERROR) {
2884 LOGE("getAnalysisInfo failed, ret = %d", rc);
2885 pthread_mutex_unlock(&mMutex);
2886 return rc;
2887 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002888 cam_color_filter_arrangement_t analysis_color_arrangement =
2889 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2890 CAM_FILTER_ARRANGEMENT_Y :
2891 gCamCapability[mCameraId]->color_arrangement);
2892 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2893 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2894 analysis_color_arrangement);
2895
Thierry Strudel3d639192016-09-09 11:52:26 -07002896 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002897 mCommon.getMatchingDimension(previewSize,
2898 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002899 mStreamConfigInfo.num_streams++;
2900 }
2901
Thierry Strudel2896d122017-02-23 19:18:03 -08002902 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 cam_analysis_info_t supportInfo;
2904 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2905 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2906 setPAAFSupport(callbackFeatureMask,
2907 CAM_STREAM_TYPE_CALLBACK,
2908 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002909 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002910 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002911 if (ret != NO_ERROR) {
2912 /* Ignore the error for Mono camera
2913 * because the PAAF bit mask is only set
2914 * for CAM_STREAM_TYPE_ANALYSIS stream type
2915 */
2916 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2917 LOGW("getAnalysisInfo failed, ret = %d", ret);
2918 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002919 }
2920 mSupportChannel = new QCamera3SupportChannel(
2921 mCameraHandle->camera_handle,
2922 mChannelHandle,
2923 mCameraHandle->ops,
2924 &gCamCapability[mCameraId]->padding_info,
2925 callbackFeatureMask,
2926 CAM_STREAM_TYPE_CALLBACK,
2927 &QCamera3SupportChannel::kDim,
2928 CAM_FORMAT_YUV_420_NV21,
2929 supportInfo.hw_analysis_supported,
2930 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002931 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002932 if (!mSupportChannel) {
2933 LOGE("dummy channel cannot be created");
2934 pthread_mutex_unlock(&mMutex);
2935 return -ENOMEM;
2936 }
2937 }
2938
2939 if (mSupportChannel) {
2940 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2941 QCamera3SupportChannel::kDim;
2942 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2943 CAM_STREAM_TYPE_CALLBACK;
2944 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2945 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2946 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2947 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2948 gCamCapability[mCameraId]->color_arrangement);
2949 mStreamConfigInfo.num_streams++;
2950 }
2951
2952 if (mRawDumpChannel) {
2953 cam_dimension_t rawSize;
2954 rawSize = getMaxRawSize(mCameraId);
2955 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2956 rawSize;
2957 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2958 CAM_STREAM_TYPE_RAW;
2959 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2960 CAM_QCOM_FEATURE_NONE;
2961 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2962 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2963 gCamCapability[mCameraId]->color_arrangement);
2964 mStreamConfigInfo.num_streams++;
2965 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002966
2967 if (mHdrPlusRawSrcChannel) {
2968 cam_dimension_t rawSize;
2969 rawSize = getMaxRawSize(mCameraId);
2970 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2971 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2972 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2973 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2974 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2975 gCamCapability[mCameraId]->color_arrangement);
2976 mStreamConfigInfo.num_streams++;
2977 }
2978
Thierry Strudel3d639192016-09-09 11:52:26 -07002979    /* In HFR mode, if a video stream is not added, create a dummy channel so that
2980     * the ISP can still run in batch mode even for the preview-only case. This channel is
2981     * never 'start'ed (no stream-on), it is only 'initialized' */
2982 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2983 !m_bIsVideo) {
2984 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2985 setPAAFSupport(dummyFeatureMask,
2986 CAM_STREAM_TYPE_VIDEO,
2987 gCamCapability[mCameraId]->color_arrangement);
2988 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2989 mChannelHandle,
2990 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002991 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002992 this,
2993 &mDummyBatchStream,
2994 CAM_STREAM_TYPE_VIDEO,
2995 dummyFeatureMask,
2996 mMetadataChannel);
2997 if (NULL == mDummyBatchChannel) {
2998 LOGE("creation of mDummyBatchChannel failed."
2999 "Preview will use non-hfr sensor mode ");
3000 }
3001 }
3002 if (mDummyBatchChannel) {
3003 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3004 mDummyBatchStream.width;
3005 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3006 mDummyBatchStream.height;
3007 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3008 CAM_STREAM_TYPE_VIDEO;
3009 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3010 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3011 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3012 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3013 gCamCapability[mCameraId]->color_arrangement);
3014 mStreamConfigInfo.num_streams++;
3015 }
3016
3017 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3018 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003019 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003020 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003021
3022 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3023 for (pendingRequestIterator i = mPendingRequestsList.begin();
3024 i != mPendingRequestsList.end();) {
3025 i = erasePendingRequest(i);
3026 }
3027 mPendingFrameDropList.clear();
3028 // Initialize/Reset the pending buffers list
3029 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3030 req.mPendingBufferList.clear();
3031 }
3032 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003033 mExpectedInflightDuration = 0;
3034 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003035
Thierry Strudel3d639192016-09-09 11:52:26 -07003036 mCurJpegMeta.clear();
3037 //Get min frame duration for this streams configuration
3038 deriveMinFrameDuration();
3039
Chien-Yu Chenee335912017-02-09 17:53:20 -08003040 mFirstPreviewIntentSeen = false;
3041
3042 // Disable HRD+ if it's enabled;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003043 {
3044 Mutex::Autolock l(gHdrPlusClientLock);
3045 disableHdrPlusModeLocked();
3046 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003047
Thierry Strudel3d639192016-09-09 11:52:26 -07003048 // Update state
3049 mState = CONFIGURED;
3050
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003051 mFirstMetadataCallback = true;
3052
Thierry Strudel3d639192016-09-09 11:52:26 -07003053 pthread_mutex_unlock(&mMutex);
3054
3055 return rc;
3056}
3057
3058/*===========================================================================
3059 * FUNCTION : validateCaptureRequest
3060 *
3061 * DESCRIPTION: validate a capture request from camera service
3062 *
3063 * PARAMETERS :
3064 * @request : request from framework to process
3065 *
3066 * RETURN :
3067 *
3068 *==========================================================================*/
3069int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003070 camera3_capture_request_t *request,
3071 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003072{
3073 ssize_t idx = 0;
3074 const camera3_stream_buffer_t *b;
3075 CameraMetadata meta;
3076
3077 /* Sanity check the request */
3078 if (request == NULL) {
3079 LOGE("NULL capture request");
3080 return BAD_VALUE;
3081 }
3082
3083 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3084 /*settings cannot be null for the first request*/
3085 return BAD_VALUE;
3086 }
3087
3088 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003089 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3090 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003091 LOGE("Request %d: No output buffers provided!",
3092                frameNumber);
3093 return BAD_VALUE;
3094 }
3095 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3096 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3097 request->num_output_buffers, MAX_NUM_STREAMS);
3098 return BAD_VALUE;
3099 }
3100 if (request->input_buffer != NULL) {
3101 b = request->input_buffer;
3102 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3103 LOGE("Request %d: Buffer %ld: Status not OK!",
3104 frameNumber, (long)idx);
3105 return BAD_VALUE;
3106 }
3107 if (b->release_fence != -1) {
3108 LOGE("Request %d: Buffer %ld: Has a release fence!",
3109 frameNumber, (long)idx);
3110 return BAD_VALUE;
3111 }
3112 if (b->buffer == NULL) {
3113 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3114 frameNumber, (long)idx);
3115 return BAD_VALUE;
3116 }
3117 }
3118
3119 // Validate all buffers
3120 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003121 if (b == NULL) {
3122 return BAD_VALUE;
3123 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003124 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003125 QCamera3ProcessingChannel *channel =
3126 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3127 if (channel == NULL) {
3128 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3129 frameNumber, (long)idx);
3130 return BAD_VALUE;
3131 }
3132 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3133 LOGE("Request %d: Buffer %ld: Status not OK!",
3134 frameNumber, (long)idx);
3135 return BAD_VALUE;
3136 }
3137 if (b->release_fence != -1) {
3138 LOGE("Request %d: Buffer %ld: Has a release fence!",
3139 frameNumber, (long)idx);
3140 return BAD_VALUE;
3141 }
3142 if (b->buffer == NULL) {
3143 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3144 frameNumber, (long)idx);
3145 return BAD_VALUE;
3146 }
3147 if (*(b->buffer) == NULL) {
3148 LOGE("Request %d: Buffer %ld: NULL private handle!",
3149 frameNumber, (long)idx);
3150 return BAD_VALUE;
3151 }
3152 idx++;
3153 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003154 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003155 return NO_ERROR;
3156}
3157
3158/*===========================================================================
3159 * FUNCTION : deriveMinFrameDuration
3160 *
3161 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3162 * on currently configured streams.
3163 *
3164 * PARAMETERS : NONE
3165 *
3166 * RETURN : NONE
3167 *
3168 *==========================================================================*/
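/* Illustrative example (hypothetical sizes, not from any particular sensor): with a
 * 4032x3024 BLOB stream and a 1920x1080 preview stream, maxProcessedDim is driven by the
 * JPEG dimension, so mMinProcessedFrameDuration and mMinJpegFrameDuration are both taken
 * from the picture_sizes_tbl entry matching 4032x3024; mMinRawFrameDuration stays 0 here
 * because no RAW stream is configured (hasRaw remains false). */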
3169void QCamera3HardwareInterface::deriveMinFrameDuration()
3170{
3171 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003172 bool hasRaw = false;
3173
3174 mMinRawFrameDuration = 0;
3175 mMinJpegFrameDuration = 0;
3176 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003177
3178 maxJpegDim = 0;
3179 maxProcessedDim = 0;
3180 maxRawDim = 0;
3181
3182 // Figure out maximum jpeg, processed, and raw dimensions
3183 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3184 it != mStreamInfo.end(); it++) {
3185
3186 // Input stream doesn't have valid stream_type
3187 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3188 continue;
3189
3190 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3191 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3192 if (dimension > maxJpegDim)
3193 maxJpegDim = dimension;
3194 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3195 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3196 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003197 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003198 if (dimension > maxRawDim)
3199 maxRawDim = dimension;
3200 } else {
3201 if (dimension > maxProcessedDim)
3202 maxProcessedDim = dimension;
3203 }
3204 }
3205
3206 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3207 MAX_SIZES_CNT);
3208
3209 //Assume all jpeg dimensions are in processed dimensions.
3210 if (maxJpegDim > maxProcessedDim)
3211 maxProcessedDim = maxJpegDim;
3212 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003213 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003214 maxRawDim = INT32_MAX;
3215
3216 for (size_t i = 0; i < count; i++) {
3217 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3218 gCamCapability[mCameraId]->raw_dim[i].height;
3219 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3220 maxRawDim = dimension;
3221 }
3222 }
3223
3224 //Find minimum durations for processed, jpeg, and raw
3225 for (size_t i = 0; i < count; i++) {
3226 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3227 gCamCapability[mCameraId]->raw_dim[i].height) {
3228 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3229 break;
3230 }
3231 }
3232 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3233 for (size_t i = 0; i < count; i++) {
3234 if (maxProcessedDim ==
3235 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3236 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3237 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3238 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3239 break;
3240 }
3241 }
3242}
3243
3244/*===========================================================================
3245 * FUNCTION : getMinFrameDuration
3246 *
3247 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3248 *              derived for the current stream configuration and the current request.
3249 *
3250 * PARAMETERS : @request: request sent by the frameworks
3251 *
3252 * RETURN     : min frame duration for a particular request
3253 *
3254 *==========================================================================*/
3255int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3256{
3257 bool hasJpegStream = false;
3258 bool hasRawStream = false;
3259 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3260 const camera3_stream_t *stream = request->output_buffers[i].stream;
3261 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3262 hasJpegStream = true;
3263 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3264 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3265 stream->format == HAL_PIXEL_FORMAT_RAW16)
3266 hasRawStream = true;
3267 }
3268
3269 if (!hasJpegStream)
3270 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3271 else
3272 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3273}
3274
3275/*===========================================================================
3276 * FUNCTION : handleBuffersDuringFlushLock
3277 *
3278 * DESCRIPTION: Account for buffers returned from back-end during flush
3279 * This function is executed while mMutex is held by the caller.
3280 *
3281 * PARAMETERS :
3282 * @buffer: image buffer for the callback
3283 *
3284 * RETURN :
3285 *==========================================================================*/
3286void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3287{
3288 bool buffer_found = false;
3289 for (List<PendingBuffersInRequest>::iterator req =
3290 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3291 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3292 for (List<PendingBufferInfo>::iterator i =
3293 req->mPendingBufferList.begin();
3294 i != req->mPendingBufferList.end(); i++) {
3295 if (i->buffer == buffer->buffer) {
3296 mPendingBuffersMap.numPendingBufsAtFlush--;
3297 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3298 buffer->buffer, req->frame_number,
3299 mPendingBuffersMap.numPendingBufsAtFlush);
3300 buffer_found = true;
3301 break;
3302 }
3303 }
3304 if (buffer_found) {
3305 break;
3306 }
3307 }
3308 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3309 //signal the flush()
3310 LOGD("All buffers returned to HAL. Continue flush");
3311 pthread_cond_signal(&mBuffersCond);
3312 }
3313}
3314
Thierry Strudel3d639192016-09-09 11:52:26 -07003315/*===========================================================================
3316 * FUNCTION : handleBatchMetadata
3317 *
3318 * DESCRIPTION: Handles metadata buffer callback in batch mode
3319 *
3320 * PARAMETERS : @metadata_buf: metadata buffer
3321 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3322 * the meta buf in this method
3323 *
3324 * RETURN :
3325 *
3326 *==========================================================================*/
3327void QCamera3HardwareInterface::handleBatchMetadata(
3328 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3329{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003330 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003331
3332 if (NULL == metadata_buf) {
3333 LOGE("metadata_buf is NULL");
3334 return;
3335 }
3336    /* In batch mode, the metadata will contain the frame number and timestamp of
3337 * the last frame in the batch. Eg: a batch containing buffers from request
3338 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3339     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3340 * multiple process_capture_results */
3341 metadata_buffer_t *metadata =
3342 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3343 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3344 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3345 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3346 uint32_t frame_number = 0, urgent_frame_number = 0;
3347 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3348 bool invalid_metadata = false;
3349 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3350 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003351 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003352
3353 int32_t *p_frame_number_valid =
3354 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3355 uint32_t *p_frame_number =
3356 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3357 int64_t *p_capture_time =
3358 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3359 int32_t *p_urgent_frame_number_valid =
3360 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3361 uint32_t *p_urgent_frame_number =
3362 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3363
3364 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3365 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3366 (NULL == p_urgent_frame_number)) {
3367 LOGE("Invalid metadata");
3368 invalid_metadata = true;
3369 } else {
3370 frame_number_valid = *p_frame_number_valid;
3371 last_frame_number = *p_frame_number;
3372 last_frame_capture_time = *p_capture_time;
3373 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3374 last_urgent_frame_number = *p_urgent_frame_number;
3375 }
3376
3377    /* In batch mode, when no video buffers are requested, set_parms are sent
3378 * for every capture_request. The difference between consecutive urgent
3379 * frame numbers and frame numbers should be used to interpolate the
3380 * corresponding frame numbers and time stamps */
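    /* Illustrative example: if the previous batch ended at frame 7 and this metadata reports
     * last_frame_number = 11, mPendingBatchMap maps 11 back to first_frame_number = 8, so
     * frameNumDiff = 4 and results for frames 8..11 are interpolated from this single
     * metadata buffer in the loop below. */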
3381 pthread_mutex_lock(&mMutex);
3382 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003383 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3384 if(idx < 0) {
3385 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3386 last_urgent_frame_number);
3387 mState = ERROR;
3388 pthread_mutex_unlock(&mMutex);
3389 return;
3390 }
3391 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003392 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3393 first_urgent_frame_number;
3394
3395 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3396 urgent_frame_number_valid,
3397 first_urgent_frame_number, last_urgent_frame_number);
3398 }
3399
3400 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003401 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3402 if(idx < 0) {
3403 LOGE("Invalid frame number received: %d. Irrecoverable error",
3404 last_frame_number);
3405 mState = ERROR;
3406 pthread_mutex_unlock(&mMutex);
3407 return;
3408 }
3409 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003410 frameNumDiff = last_frame_number + 1 -
3411 first_frame_number;
3412 mPendingBatchMap.removeItem(last_frame_number);
3413
3414 LOGD("frm: valid: %d frm_num: %d - %d",
3415 frame_number_valid,
3416 first_frame_number, last_frame_number);
3417
3418 }
3419 pthread_mutex_unlock(&mMutex);
3420
3421 if (urgent_frame_number_valid || frame_number_valid) {
3422 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3423 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3424 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3425 urgentFrameNumDiff, last_urgent_frame_number);
3426 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3427 LOGE("frameNumDiff: %d frameNum: %d",
3428 frameNumDiff, last_frame_number);
3429 }
3430
3431 for (size_t i = 0; i < loopCount; i++) {
3432 /* handleMetadataWithLock is called even for invalid_metadata for
3433 * pipeline depth calculation */
3434 if (!invalid_metadata) {
3435 /* Infer frame number. Batch metadata contains frame number of the
3436 * last frame */
3437 if (urgent_frame_number_valid) {
3438 if (i < urgentFrameNumDiff) {
3439 urgent_frame_number =
3440 first_urgent_frame_number + i;
3441 LOGD("inferred urgent frame_number: %d",
3442 urgent_frame_number);
3443 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3444 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3445 } else {
3446 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3447 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3448 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3449 }
3450 }
3451
3452 /* Infer frame number. Batch metadata contains frame number of the
3453 * last frame */
3454 if (frame_number_valid) {
3455 if (i < frameNumDiff) {
3456 frame_number = first_frame_number + i;
3457 LOGD("inferred frame_number: %d", frame_number);
3458 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3459 CAM_INTF_META_FRAME_NUMBER, frame_number);
3460 } else {
3461 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3462 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3463 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3464 }
3465 }
3466
3467 if (last_frame_capture_time) {
3468 //Infer timestamp
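                //first = last - (loopCount - 1)/fps, and the i-th inferred frame gets
                //first + i/fps, so timestamps are spaced one HFR frame interval apart (in ns)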
3469 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003470 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003471 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003472 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003473 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3474 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3475 LOGD("batch capture_time: %lld, capture_time: %lld",
3476 last_frame_capture_time, capture_time);
3477 }
3478 }
3479 pthread_mutex_lock(&mMutex);
3480 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003481 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003482 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3483 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003484                &is_metabuf_queued /* whether the metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003485 pthread_mutex_unlock(&mMutex);
3486 }
3487
3488 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003489 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003490 mMetadataChannel->bufDone(metadata_buf);
3491 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003492 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003493 }
3494}
3495
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003496void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3497 camera3_error_msg_code_t errorCode)
3498{
3499 camera3_notify_msg_t notify_msg;
3500 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3501 notify_msg.type = CAMERA3_MSG_ERROR;
3502 notify_msg.message.error.error_code = errorCode;
3503 notify_msg.message.error.error_stream = NULL;
3504 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003505 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003506
3507 return;
3508}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003509
3510/*===========================================================================
3511 * FUNCTION : sendPartialMetadataWithLock
3512 *
3513 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3514 *
3515 * PARAMETERS : @metadata: metadata buffer
3516 * @requestIter: The iterator for the pending capture request for
3517 *                          which the partial result is being sent
3518 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3519 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003520 * @isJumpstartMetadata: Whether this is a partial metadata for
3521 * jumpstart, i.e. even though it doesn't map to a valid partial
3522 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003523 *
3524 * RETURN :
3525 *
3526 *==========================================================================*/
3527
3528void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3529 metadata_buffer_t *metadata,
3530 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003531 bool lastUrgentMetadataInBatch,
3532 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533{
3534 camera3_capture_result_t result;
3535 memset(&result, 0, sizeof(camera3_capture_result_t));
3536
3537 requestIter->partial_result_cnt++;
3538
3539 // Extract 3A metadata
3540 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003541 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3542 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003543 // Populate metadata result
3544 result.frame_number = requestIter->frame_number;
3545 result.num_output_buffers = 0;
3546 result.output_buffers = NULL;
3547 result.partial_result = requestIter->partial_result_cnt;
3548
3549 {
3550 Mutex::Autolock l(gHdrPlusClientLock);
3551 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3552 // Notify HDR+ client about the partial metadata.
3553 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3554 result.partial_result == PARTIAL_RESULT_COUNT);
3555 }
3556 }
3557
3558 orchestrateResult(&result);
3559 LOGD("urgent frame_number = %u", result.frame_number);
3560 free_camera_metadata((camera_metadata_t *)result.result);
3561}
3562
Thierry Strudel3d639192016-09-09 11:52:26 -07003563/*===========================================================================
3564 * FUNCTION : handleMetadataWithLock
3565 *
3566 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3567 *
3568 * PARAMETERS : @metadata_buf: metadata buffer
3569 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3570 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003571 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3572 * last urgent metadata in a batch. Always true for non-batch mode
3573 * @lastMetadataInBatch: Boolean to indicate whether this is the
3574 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003575 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3576 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003577 *
3578 * RETURN :
3579 *
3580 *==========================================================================*/
3581void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003582 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003583 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3584 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003585{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003586 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3588 //during flush do not send metadata from this thread
3589 LOGD("not sending metadata during flush or when mState is error");
3590 if (free_and_bufdone_meta_buf) {
3591 mMetadataChannel->bufDone(metadata_buf);
3592 free(metadata_buf);
3593 }
3594 return;
3595 }
3596
3597 //not in flush
3598 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3599 int32_t frame_number_valid, urgent_frame_number_valid;
3600 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003601 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003602 nsecs_t currentSysTime;
3603
3604 int32_t *p_frame_number_valid =
3605 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3606 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3607 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003608 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003609 int32_t *p_urgent_frame_number_valid =
3610 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3611 uint32_t *p_urgent_frame_number =
3612 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3613 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3614 metadata) {
3615 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3616 *p_frame_number_valid, *p_frame_number);
3617 }
3618
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003619 camera_metadata_t *resultMetadata = nullptr;
3620
Thierry Strudel3d639192016-09-09 11:52:26 -07003621 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3622 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3623 LOGE("Invalid metadata");
3624 if (free_and_bufdone_meta_buf) {
3625 mMetadataChannel->bufDone(metadata_buf);
3626 free(metadata_buf);
3627 }
3628 goto done_metadata;
3629 }
3630 frame_number_valid = *p_frame_number_valid;
3631 frame_number = *p_frame_number;
3632 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003633 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003634 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3635 urgent_frame_number = *p_urgent_frame_number;
3636 currentSysTime = systemTime(CLOCK_MONOTONIC);
3637
Jason Lee603176d2017-05-31 11:43:27 -07003638 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
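        // Estimate the BOOTTIME-to-MONOTONIC clock offset by bracketing a BOOTTIME read
        // between two MONOTONIC reads over a few tries, keeping the attempt with the
        // smallest bracket gap, then shift capture_time onto the monotonic base.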
3639 const int tries = 3;
3640 nsecs_t bestGap, measured;
3641 for (int i = 0; i < tries; ++i) {
3642 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3643 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3644 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3645 const nsecs_t gap = tmono2 - tmono;
3646 if (i == 0 || gap < bestGap) {
3647 bestGap = gap;
3648 measured = tbase - ((tmono + tmono2) >> 1);
3649 }
3650 }
3651 capture_time -= measured;
3652 }
3653
Thierry Strudel3d639192016-09-09 11:52:26 -07003654 // Detect if buffers from any requests are overdue
3655 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003656 int64_t timeout;
3657 {
3658 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3659 // If there is a pending HDR+ request, the following requests may be blocked until the
3660 // HDR+ request is done. So allow a longer timeout.
3661 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3662 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003663 if (timeout < mExpectedInflightDuration) {
3664 timeout = mExpectedInflightDuration;
3665 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003666 }
3667
3668 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003669 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003670 assert(missed.stream->priv);
3671 if (missed.stream->priv) {
3672 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3673 assert(ch->mStreams[0]);
3674 if (ch->mStreams[0]) {
3675 LOGE("Cancel missing frame = %d, buffer = %p,"
3676 "stream type = %d, stream format = %d",
3677 req.frame_number, missed.buffer,
3678 ch->mStreams[0]->getMyType(), missed.stream->format);
3679 ch->timeoutFrame(req.frame_number);
3680 }
3681 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003682 }
3683 }
3684 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003685    //For the very first metadata callback, regardless of whether it contains a valid
3686 //frame number, send the partial metadata for the jumpstarting requests.
3687 //Note that this has to be done even if the metadata doesn't contain valid
3688 //urgent frame number, because in the case only 1 request is ever submitted
3689 //to HAL, there won't be subsequent valid urgent frame number.
3690 if (mFirstMetadataCallback) {
3691 for (pendingRequestIterator i =
3692 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3693 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003694 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3695 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003696 }
3697 }
3698 mFirstMetadataCallback = false;
3699 }
3700
Thierry Strudel3d639192016-09-09 11:52:26 -07003701 //Partial result on process_capture_result for timestamp
3702 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003703 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003704
3705        //Received an urgent frame number, handle it
3706 //using partial results
3707 for (pendingRequestIterator i =
3708 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3709 LOGD("Iterator Frame = %d urgent frame = %d",
3710 i->frame_number, urgent_frame_number);
3711
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003712 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003713 (i->partial_result_cnt == 0)) {
3714 LOGE("Error: HAL missed urgent metadata for frame number %d",
3715 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003716 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003717 }
3718
3719 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003720 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003721 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3722 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003723 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3724 // Instant AEC settled for this frame.
3725 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3726 mInstantAECSettledFrameNumber = urgent_frame_number;
3727 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003728 break;
3729 }
3730 }
3731 }
3732
3733 if (!frame_number_valid) {
3734 LOGD("Not a valid normal frame number, used as SOF only");
3735 if (free_and_bufdone_meta_buf) {
3736 mMetadataChannel->bufDone(metadata_buf);
3737 free(metadata_buf);
3738 }
3739 goto done_metadata;
3740 }
3741 LOGH("valid frame_number = %u, capture_time = %lld",
3742 frame_number, capture_time);
3743
Emilian Peev4e0fe952017-06-30 12:40:09 -07003744 handleDepthDataLocked(metadata->depth_data, frame_number,
3745 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003746
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003747 // Check whether any stream buffer corresponding to this is dropped or not
3748 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3749    // Or, if instant AEC is enabled, drop frames until AEC has settled.
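    // In the instant AEC path every buffer of a request older than
    // mInstantAECSettledFrameNumber is returned as CAMERA3_MSG_ERROR_BUFFER below,
    // so the framework never receives frames captured before exposure settled.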
3750 for (auto & pendingRequest : mPendingRequestsList) {
3751 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3752 mInstantAECSettledFrameNumber)) {
3753 camera3_notify_msg_t notify_msg = {};
3754 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003755 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003756 QCamera3ProcessingChannel *channel =
3757 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003758 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003759 if (p_cam_frame_drop) {
3760 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003761 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003762 // Got the stream ID for drop frame.
3763 dropFrame = true;
3764 break;
3765 }
3766 }
3767 } else {
3768 // This is instant AEC case.
3769 // For instant AEC drop the stream untill AEC is settled.
3770 dropFrame = true;
3771 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003772
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003773 if (dropFrame) {
3774 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3775 if (p_cam_frame_drop) {
3776 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003777 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003778 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003779 } else {
3780 // For instant AEC, inform frame drop and frame number
3781 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3782 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003783 pendingRequest.frame_number, streamID,
3784 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003785 }
3786 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003787 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003788 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003789 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003790 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003791 if (p_cam_frame_drop) {
3792 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003793 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003794 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003795 } else {
3796 // For instant AEC, inform frame drop and frame number
3797 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3798 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003799 pendingRequest.frame_number, streamID,
3800 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003801 }
3802 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003803 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003804 PendingFrameDrop.stream_ID = streamID;
3805 // Add the Frame drop info to mPendingFrameDropList
3806 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003807 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003808 }
3809 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003811
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 for (auto & pendingRequest : mPendingRequestsList) {
3813 // Find the pending request with the frame number.
3814 if (pendingRequest.frame_number == frame_number) {
3815 // Update the sensor timestamp.
3816 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003817
Thierry Strudel3d639192016-09-09 11:52:26 -07003818
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003819 /* Set the timestamp in display metadata so that clients aware of
3820               private_handle, such as VT, can use these unmodified timestamps.
3821 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003822 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003823
Thierry Strudel3d639192016-09-09 11:52:26 -07003824 // Find channel requiring metadata, meaning internal offline postprocess
3825 // is needed.
3826 //TODO: for now, we don't support two streams requiring metadata at the same time.
3827            // (because we are not making copies, and the metadata buffer is not reference counted.)
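            // When internalPproc is set, the metadata buffer is handed to the channel via
            // queueReprocMetadata() and is therefore not buf-done'd/freed in the
            // !internalPproc block further down.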
3828 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003829 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3830 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003831 if (iter->need_metadata) {
3832 internalPproc = true;
3833 QCamera3ProcessingChannel *channel =
3834 (QCamera3ProcessingChannel *)iter->stream->priv;
3835 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003836 if(p_is_metabuf_queued != NULL) {
3837 *p_is_metabuf_queued = true;
3838 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003839 break;
3840 }
3841 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003842 for (auto itr = pendingRequest.internalRequestList.begin();
3843 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003844 if (itr->need_metadata) {
3845 internalPproc = true;
3846 QCamera3ProcessingChannel *channel =
3847 (QCamera3ProcessingChannel *)itr->stream->priv;
3848 channel->queueReprocMetadata(metadata_buf);
3849 break;
3850 }
3851 }
3852
Thierry Strudel54dc9782017-02-15 12:12:10 -08003853 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003854
3855 bool *enableZsl = nullptr;
3856 if (gExposeEnableZslKey) {
3857 enableZsl = &pendingRequest.enableZsl;
3858 }
3859
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003860 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003861 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003862 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003863
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003864 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003865
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003866 if (pendingRequest.blob_request) {
3867 //Dump tuning metadata if enabled and available
3868 char prop[PROPERTY_VALUE_MAX];
3869 memset(prop, 0, sizeof(prop));
3870 property_get("persist.camera.dumpmetadata", prop, "0");
3871 int32_t enabled = atoi(prop);
3872 if (enabled && metadata->is_tuning_params_valid) {
3873 dumpMetadataToFile(metadata->tuning_params,
3874 mMetaFrameCount,
3875 enabled,
3876 "Snapshot",
3877 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003878 }
3879 }
3880
3881 if (!internalPproc) {
3882 LOGD("couldn't find need_metadata for this metadata");
3883 // Return metadata buffer
3884 if (free_and_bufdone_meta_buf) {
3885 mMetadataChannel->bufDone(metadata_buf);
3886 free(metadata_buf);
3887 }
3888 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003889
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003890 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003891 }
3892 }
3893
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003894 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3895
3896 // Try to send out capture result metadata.
3897 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003898 return;
3899
Thierry Strudel3d639192016-09-09 11:52:26 -07003900done_metadata:
3901 for (pendingRequestIterator i = mPendingRequestsList.begin();
3902 i != mPendingRequestsList.end() ;i++) {
3903 i->pipeline_depth++;
3904 }
3905 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3906 unblockRequestIfNecessary();
3907}
3908
3909/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003910 * FUNCTION   : handleDepthDataLocked
3911 *
3912 * DESCRIPTION: Handles incoming depth data
3913 *
3914 * PARAMETERS : @depthData : Depth data
3915 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003916 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003917 *
3918 * RETURN :
3919 *
3920 *==========================================================================*/
3921void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003922 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003923 uint32_t currentFrameNumber;
3924 buffer_handle_t *depthBuffer;
3925
3926 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003927 return;
3928 }
3929
3930 camera3_stream_buffer_t resultBuffer =
3931 {.acquire_fence = -1,
3932 .release_fence = -1,
3933 .status = CAMERA3_BUFFER_STATUS_OK,
3934 .buffer = nullptr,
3935 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003936 do {
3937 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3938 if (nullptr == depthBuffer) {
3939 break;
3940 }
3941
Emilian Peev7650c122017-01-19 08:24:33 -08003942 resultBuffer.buffer = depthBuffer;
3943 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003944 if (valid) {
3945 int32_t rc = mDepthChannel->populateDepthData(depthData,
3946 frameNumber);
3947 if (NO_ERROR != rc) {
3948 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3949 } else {
3950 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3951 }
Emilian Peev7650c122017-01-19 08:24:33 -08003952 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003953 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003954 }
3955 } else if (currentFrameNumber > frameNumber) {
3956 break;
3957 } else {
3958 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3959 {{currentFrameNumber, mDepthChannel->getStream(),
3960 CAMERA3_MSG_ERROR_BUFFER}}};
3961 orchestrateNotify(&notify_msg);
3962
3963            LOGE("Depth buffer for frame number: %d is missing, "
3964                    "returning it with error status!", currentFrameNumber);
3965 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3966 }
3967 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003968 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003969 } while (currentFrameNumber < frameNumber);
3970}
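
/* Worked example (illustrative, hypothetical frame numbers) for the catch-up
 * loop above: if the oldest mapped depth buffer belongs to frame 97 while
 * depth data arrives for frame 100, the buffers for frames 97-99 are returned
 * with CAMERA3_BUFFER_STATUS_ERROR plus a CAMERA3_MSG_ERROR_BUFFER notify,
 * and only the buffer for frame 100 is populated via populateDepthData(). */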
3971
3972/*===========================================================================
3973 * FUNCTION : notifyErrorFoPendingDepthData
3974 *
3975 * DESCRIPTION: Returns error for any pending depth buffers
3976 *
3977 * PARAMETERS : depthCh - depth channel that needs to get flushed
3978 *
3979 * RETURN :
3980 *
3981 *==========================================================================*/
3982void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3983 QCamera3DepthChannel *depthCh) {
3984 uint32_t currentFrameNumber;
3985 buffer_handle_t *depthBuffer;
3986
3987 if (nullptr == depthCh) {
3988 return;
3989 }
3990
3991 camera3_notify_msg_t notify_msg =
3992 {.type = CAMERA3_MSG_ERROR,
3993 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3994 camera3_stream_buffer_t resultBuffer =
3995 {.acquire_fence = -1,
3996 .release_fence = -1,
3997 .buffer = nullptr,
3998 .stream = depthCh->getStream(),
3999 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004000
4001 while (nullptr !=
4002 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4003 depthCh->unmapBuffer(currentFrameNumber);
4004
4005 notify_msg.message.error.frame_number = currentFrameNumber;
4006 orchestrateNotify(&notify_msg);
4007
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004008 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004009 };
4010}
4011
4012/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004013 * FUNCTION : hdrPlusPerfLock
4014 *
4015 * DESCRIPTION: perf lock for HDR+ using custom intent
4016 *
4017 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4018 *
4019 * RETURN : None
4020 *
4021 *==========================================================================*/
4022void QCamera3HardwareInterface::hdrPlusPerfLock(
4023 mm_camera_super_buf_t *metadata_buf)
4024{
4025 if (NULL == metadata_buf) {
4026 LOGE("metadata_buf is NULL");
4027 return;
4028 }
4029 metadata_buffer_t *metadata =
4030 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4031 int32_t *p_frame_number_valid =
4032 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4033 uint32_t *p_frame_number =
4034 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4035
4036 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4037 LOGE("%s: Invalid metadata", __func__);
4038 return;
4039 }
4040
4041 //acquire perf lock for 5 sec after the last HDR frame is captured
4042 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4043 if ((p_frame_number != NULL) &&
4044 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004045 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004046 }
4047 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004048}
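
/* Perf-lock pairing sketch (illustrative): the snapshot perf lock acquired
 * above is released again in handleBufferWithLock() once the corresponding
 * BLOB (JPEG) buffer is returned to the framework:
 *
 *     mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
 *     ...
 *     mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
 */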
4049
4050/*===========================================================================
4051 * FUNCTION : handleInputBufferWithLock
4052 *
4053 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4054 *
4055 * PARAMETERS : @frame_number: frame number of the input buffer
4056 *
4057 * RETURN :
4058 *
4059 *==========================================================================*/
4060void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4061{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004062 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004063 pendingRequestIterator i = mPendingRequestsList.begin();
4064 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4065 i++;
4066 }
4067 if (i != mPendingRequestsList.end() && i->input_buffer) {
4068 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004069 CameraMetadata settings;
4070 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4071 if(i->settings) {
4072 settings = i->settings;
4073 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4074 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004075 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004076 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004077 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004078 } else {
4079 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004080 }
4081
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004082 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4083 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4084 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004085
4086 camera3_capture_result result;
4087 memset(&result, 0, sizeof(camera3_capture_result));
4088 result.frame_number = frame_number;
4089 result.result = i->settings;
4090 result.input_buffer = i->input_buffer;
4091 result.partial_result = PARTIAL_RESULT_COUNT;
4092
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004093 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004094 LOGD("Input request metadata and input buffer frame_number = %u",
4095 i->frame_number);
4096 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004097
4098 // Dispatch result metadata that may be just unblocked by this reprocess result.
4099 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004100 } else {
4101 LOGE("Could not find input request for frame number %d", frame_number);
4102 }
4103}
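
/* Reprocess result sketch (summarizing the path above): for an input-buffer
 * request the HAL reuses the request settings as the result metadata, marks
 * the shutter with the ANDROID_SENSOR_TIMESTAMP found in those settings (or
 * the current monotonic time if the tag is absent), and reports
 * PARTIAL_RESULT_COUNT so the framework treats the result as complete. */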
4104
4105/*===========================================================================
4106 * FUNCTION : handleBufferWithLock
4107 *
4108 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4109 *
4110 * PARAMETERS : @buffer: image buffer for the callback
4111 * @frame_number: frame number of the image buffer
4112 *
4113 * RETURN :
4114 *
4115 *==========================================================================*/
4116void QCamera3HardwareInterface::handleBufferWithLock(
4117 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4118{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004119 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004120
4121 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4122 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4123 }
4124
Thierry Strudel3d639192016-09-09 11:52:26 -07004125 /* Nothing to be done during error state */
4126 if ((ERROR == mState) || (DEINIT == mState)) {
4127 return;
4128 }
4129 if (mFlushPerf) {
4130 handleBuffersDuringFlushLock(buffer);
4131 return;
4132 }
4133 //not in flush
4134 // If the frame number doesn't exist in the pending request list,
4135 // directly send the buffer to the frameworks, and update pending buffers map
4136 // Otherwise, book-keep the buffer.
4137 pendingRequestIterator i = mPendingRequestsList.begin();
4138 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4139 i++;
4140 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004141
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004142 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004143 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004144 // For a reprocessing request, try to send out result metadata.
4145 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004146 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004147 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004148
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004149 // Check if this frame was dropped.
4150 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4151 m != mPendingFrameDropList.end(); m++) {
4152 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4153 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4154        if ((m->stream_ID == streamID) && (m->frame_number == frame_number)) {
4155            buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4156 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4157 frame_number, streamID);
4158 m = mPendingFrameDropList.erase(m);
4159 break;
4160 }
4161 }
4162
4163 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4164 LOGH("result frame_number = %d, buffer = %p",
4165 frame_number, buffer->buffer);
4166
4167 mPendingBuffersMap.removeBuf(buffer->buffer);
4168 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4169
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004170 if (mPreviewStarted == false) {
4171 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4172 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004173 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4174
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004175 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4176 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4177 mPreviewStarted = true;
4178
4179 // Set power hint for preview
4180 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4181 }
4182 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004183}
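
/* Illustrative flow for a dropped frame (hypothetical numbers): when frame 50
 * on stream ID 2 is reported dropped, the metadata handler above pushes
 * {frame_number = 50, stream_ID = 2} onto mPendingFrameDropList; when the
 * buffer for frame 50 later reaches handleBufferWithLock(), the matching
 * entry is erased and the buffer is dispatched through
 * mOutputBufferDispatcher with CAMERA3_BUFFER_STATUS_ERROR. */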
4184
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004185void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004186 const camera_metadata_t *resultMetadata)
4187{
4188 // Find the pending request for this result metadata.
4189 auto requestIter = mPendingRequestsList.begin();
4190 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4191 requestIter++;
4192 }
4193
4194 if (requestIter == mPendingRequestsList.end()) {
4195 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4196 return;
4197 }
4198
4199 // Update the result metadata
4200 requestIter->resultMetadata = resultMetadata;
4201
4202 // Check what type of request this is.
4203 bool liveRequest = false;
4204 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004205 // HDR+ request doesn't have partial results.
4206 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004207 } else if (requestIter->input_buffer != nullptr) {
4208 // Reprocessing request result is the same as settings.
4209 requestIter->resultMetadata = requestIter->settings;
4210 // Reprocessing request doesn't have partial results.
4211 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4212 } else {
4213 liveRequest = true;
4214 requestIter->partial_result_cnt++;
4215 mPendingLiveRequest--;
4216
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004217 {
4218 Mutex::Autolock l(gHdrPlusClientLock);
4219 // For a live request, send the metadata to HDR+ client.
4220 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4221 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4222 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4223 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004224 }
4225 }
4226
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004227 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4228}
4229
4230void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4231 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004232 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4233 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004234 bool readyToSend = true;
4235
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004236 // Iterate through the pending requests to send out result metadata that are ready. Also if
4237 // this result metadata belongs to a live request, notify errors for previous live requests
4238 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004239 auto iter = mPendingRequestsList.begin();
4240 while (iter != mPendingRequestsList.end()) {
4241 // Check if current pending request is ready. If it's not ready, the following pending
4242 // requests are also not ready.
4243 if (readyToSend && iter->resultMetadata == nullptr) {
4244 readyToSend = false;
4245 }
4246
4247 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4248
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004249 camera3_capture_result_t result = {};
4250 result.frame_number = iter->frame_number;
4251 result.result = iter->resultMetadata;
4252 result.partial_result = iter->partial_result_cnt;
4253
4254 // If this pending buffer has result metadata, we may be able to send out shutter callback
4255 // and result metadata.
4256 if (iter->resultMetadata != nullptr) {
4257 if (!readyToSend) {
4258 // If any of the previous pending request is not ready, this pending request is
4259 // also not ready to send in order to keep shutter callbacks and result metadata
4260 // in order.
4261 iter++;
4262 continue;
4263 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004264 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004265 // If the result metadata belongs to a live request, notify errors for previous pending
4266 // live requests.
4267 mPendingLiveRequest--;
4268
4269 CameraMetadata dummyMetadata;
4270 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4271 result.result = dummyMetadata.release();
4272
4273 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004274
4275 // partial_result should be PARTIAL_RESULT_CNT in case of
4276 // ERROR_RESULT.
4277 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4278 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004279 } else {
4280 iter++;
4281 continue;
4282 }
4283
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004284 result.output_buffers = nullptr;
4285 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004286 orchestrateResult(&result);
4287
4288 // For reprocessing, result metadata is the same as settings so do not free it here to
4289 // avoid double free.
4290 if (result.result != iter->settings) {
4291 free_camera_metadata((camera_metadata_t *)result.result);
4292 }
4293 iter->resultMetadata = nullptr;
4294 iter = erasePendingRequest(iter);
4295 }
4296
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004297 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004298 for (auto &iter : mPendingRequestsList) {
4299 // Increment pipeline depth for the following pending requests.
4300 if (iter.frame_number > frameNumber) {
4301 iter.pipeline_depth++;
4302 }
4303 }
4304 }
4305
4306 unblockRequestIfNecessary();
4307}
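
/* Worked example (illustrative) of the in-order dispatch rule above: with
 * pending frames 10, 11 and 12, if result metadata arrives for frame 11
 * first, readyToSend turns false at frame 10 and frame 11 stays queued.
 * Once frame 10's metadata becomes ready, a later call sends 10 and 11 in a
 * single pass, keeping result metadata ordered by frame number. */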
4308
Thierry Strudel3d639192016-09-09 11:52:26 -07004309/*===========================================================================
4310 * FUNCTION : unblockRequestIfNecessary
4311 *
4312 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4313 * that mMutex is held when this function is called.
4314 *
4315 * PARAMETERS :
4316 *
4317 * RETURN :
4318 *
4319 *==========================================================================*/
4320void QCamera3HardwareInterface::unblockRequestIfNecessary()
4321{
4322 // Unblock process_capture_request
4323 pthread_cond_signal(&mRequestCond);
4324}
4325
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004326/*===========================================================================
4327 * FUNCTION : isHdrSnapshotRequest
4328 *
4329 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4330 *
4331 * PARAMETERS : camera3 request structure
4332 *
4333 * RETURN : boolean decision variable
4334 *
4335 *==========================================================================*/
4336bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4337{
4338 if (request == NULL) {
4339 LOGE("Invalid request handle");
4340 assert(0);
4341 return false;
4342 }
4343
4344 if (!mForceHdrSnapshot) {
4345 CameraMetadata frame_settings;
4346 frame_settings = request->settings;
4347
4348 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4349 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4350 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4351 return false;
4352 }
4353 } else {
4354 return false;
4355 }
4356
4357 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4358 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4359 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4360 return false;
4361 }
4362 } else {
4363 return false;
4364 }
4365 }
4366
4367 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4368 if (request->output_buffers[i].stream->format
4369 == HAL_PIXEL_FORMAT_BLOB) {
4370 return true;
4371 }
4372 }
4373
4374 return false;
4375}
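
/* Minimal sketch (hypothetical request, not part of the HAL build) of
 * settings that make isHdrSnapshotRequest() return true when the request
 * also carries a BLOB output buffer and mForceHdrSnapshot is false:
 *
 *     CameraMetadata settings;
 *     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *     request->settings = settings.release();  // caller owns this metadata
 */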
4376/*===========================================================================
4377 * FUNCTION : orchestrateRequest
4378 *
4379 * DESCRIPTION: Orchestrates a capture request from camera service
4380 *
4381 * PARAMETERS :
4382 * @request : request from framework to process
4383 *
4384 * RETURN : Error status codes
4385 *
4386 *==========================================================================*/
4387int32_t QCamera3HardwareInterface::orchestrateRequest(
4388 camera3_capture_request_t *request)
4389{
4390
4391 uint32_t originalFrameNumber = request->frame_number;
4392 uint32_t originalOutputCount = request->num_output_buffers;
4393 const camera_metadata_t *original_settings = request->settings;
4394 List<InternalRequest> internallyRequestedStreams;
4395 List<InternalRequest> emptyInternalList;
4396
4397 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4398 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4399 uint32_t internalFrameNumber;
4400 CameraMetadata modified_meta;
4401
4402
4403 /* Add Blob channel to list of internally requested streams */
4404 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4405 if (request->output_buffers[i].stream->format
4406 == HAL_PIXEL_FORMAT_BLOB) {
4407 InternalRequest streamRequested;
4408 streamRequested.meteringOnly = 1;
4409 streamRequested.need_metadata = 0;
4410 streamRequested.stream = request->output_buffers[i].stream;
4411 internallyRequestedStreams.push_back(streamRequested);
4412 }
4413 }
4414 request->num_output_buffers = 0;
4415 auto itr = internallyRequestedStreams.begin();
4416
4417 /* Modify setting to set compensation */
4418 modified_meta = request->settings;
4419 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4420 uint8_t aeLock = 1;
4421 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4422 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4423 camera_metadata_t *modified_settings = modified_meta.release();
4424 request->settings = modified_settings;
4425
4426 /* Capture Settling & -2x frame */
4427 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4428 request->frame_number = internalFrameNumber;
4429 processCaptureRequest(request, internallyRequestedStreams);
4430
4431 request->num_output_buffers = originalOutputCount;
4432 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4433 request->frame_number = internalFrameNumber;
4434 processCaptureRequest(request, emptyInternalList);
4435 request->num_output_buffers = 0;
4436
4437 modified_meta = modified_settings;
4438 expCompensation = 0;
4439 aeLock = 1;
4440 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4441 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4442 modified_settings = modified_meta.release();
4443 request->settings = modified_settings;
4444
4445 /* Capture Settling & 0X frame */
4446
4447 itr = internallyRequestedStreams.begin();
4448 if (itr == internallyRequestedStreams.end()) {
4449 LOGE("Error Internally Requested Stream list is empty");
4450 assert(0);
4451 } else {
4452 itr->need_metadata = 0;
4453 itr->meteringOnly = 1;
4454 }
4455
4456 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4457 request->frame_number = internalFrameNumber;
4458 processCaptureRequest(request, internallyRequestedStreams);
4459
4460 itr = internallyRequestedStreams.begin();
4461 if (itr == internallyRequestedStreams.end()) {
4462 ALOGE("Error Internally Requested Stream list is empty");
4463 assert(0);
4464 } else {
4465 itr->need_metadata = 1;
4466 itr->meteringOnly = 0;
4467 }
4468
4469 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4470 request->frame_number = internalFrameNumber;
4471 processCaptureRequest(request, internallyRequestedStreams);
4472
4473 /* Capture 2X frame*/
4474 modified_meta = modified_settings;
4475 expCompensation = GB_HDR_2X_STEP_EV;
4476 aeLock = 1;
4477 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4478 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4479 modified_settings = modified_meta.release();
4480 request->settings = modified_settings;
4481
4482 itr = internallyRequestedStreams.begin();
4483 if (itr == internallyRequestedStreams.end()) {
4484 ALOGE("Error Internally Requested Stream list is empty");
4485 assert(0);
4486 } else {
4487 itr->need_metadata = 0;
4488 itr->meteringOnly = 1;
4489 }
4490 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4491 request->frame_number = internalFrameNumber;
4492 processCaptureRequest(request, internallyRequestedStreams);
4493
4494 itr = internallyRequestedStreams.begin();
4495 if (itr == internallyRequestedStreams.end()) {
4496 ALOGE("Error Internally Requested Stream list is empty");
4497 assert(0);
4498 } else {
4499 itr->need_metadata = 1;
4500 itr->meteringOnly = 0;
4501 }
4502
4503 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4504 request->frame_number = internalFrameNumber;
4505 processCaptureRequest(request, internallyRequestedStreams);
4506
4507
4508 /* Capture 2X on original streaming config*/
4509 internallyRequestedStreams.clear();
4510
4511 /* Restore original settings pointer */
4512 request->settings = original_settings;
4513 } else {
4514 uint32_t internalFrameNumber;
4515 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4516 request->frame_number = internalFrameNumber;
4517 return processCaptureRequest(request, internallyRequestedStreams);
4518 }
4519
4520 return NO_ERROR;
4521}
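
/* Summary of the HDR snapshot expansion above (values taken from the code):
 * one framework request becomes a sequence of internal requests, each with
 * its own internal frame number from _orchestrationDb:
 *   1. metering-only request with AE compensation = GB_HDR_HALF_STEP_EV
 *   2. the framework's own output buffers, whose internal frame number is
 *      mapped back to the original framework frame number
 *   3. metering-only, then metadata-producing request at AE compensation 0
 *   4. metering-only, then metadata-producing request at GB_HDR_2X_STEP_EV
 * orchestrateResult()/orchestrateNotify() later translate internal frame
 * numbers back and drop results belonging to the purely internal requests. */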
4522
4523/*===========================================================================
4524 * FUNCTION : orchestrateResult
4525 *
4526 * DESCRIPTION: Orchestrates a capture result to camera service
4527 *
4528 * PARAMETERS :
4529 *   @result : capture result to be sent to the framework
4530 *
4531 * RETURN :
4532 *
4533 *==========================================================================*/
4534void QCamera3HardwareInterface::orchestrateResult(
4535 camera3_capture_result_t *result)
4536{
4537 uint32_t frameworkFrameNumber;
4538 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4539 frameworkFrameNumber);
4540 if (rc != NO_ERROR) {
4541 LOGE("Cannot find translated frameworkFrameNumber");
4542 assert(0);
4543 } else {
4544 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004545 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004546 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004547 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004548 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4549 camera_metadata_entry_t entry;
4550 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4551 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004552 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004553 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4554 if (ret != OK)
4555 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004556 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004557 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004558 result->frame_number = frameworkFrameNumber;
4559 mCallbackOps->process_capture_result(mCallbackOps, result);
4560 }
4561 }
4562}
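
/* Illustrative mapping (hypothetical numbers): if _orchestrationDb maps
 * internal frame 1000 to framework frame 42, the result's frame_number is
 * rewritten to 42 before process_capture_result() is invoked; results whose
 * internal frame maps to EMPTY_FRAMEWORK_FRAME_NUMBER (purely internal
 * metering requests) are dropped here instead of being forwarded. */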
4563
4564/*===========================================================================
4565 * FUNCTION : orchestrateNotify
4566 *
4567 * DESCRIPTION: Orchestrates a notify to camera service
4568 *
4569 * PARAMETERS :
4570 *   @notify_msg : notify message to be sent to the framework
4571 *
4572 * RETURN :
4573 *
4574 *==========================================================================*/
4575void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4576{
4577 uint32_t frameworkFrameNumber;
4578 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004579 int32_t rc = NO_ERROR;
4580
4581 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004582 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004583
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004584 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004585 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4586 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4587 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004588 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004589 LOGE("Cannot find translated frameworkFrameNumber");
4590 assert(0);
4591 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004592 }
4593 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004594
4595 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4596 LOGD("Internal Request drop the notifyCb");
4597 } else {
4598 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4599 mCallbackOps->notify(mCallbackOps, notify_msg);
4600 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601}
4602
4603/*===========================================================================
4604 * FUNCTION : FrameNumberRegistry
4605 *
4606 * DESCRIPTION: Constructor
4607 *
4608 * PARAMETERS :
4609 *
4610 * RETURN :
4611 *
4612 *==========================================================================*/
4613FrameNumberRegistry::FrameNumberRegistry()
4614{
4615 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4616}
4617
4618/*===========================================================================
4619 * FUNCTION : ~FrameNumberRegistry
4620 *
4621 * DESCRIPTION: Destructor
4622 *
4623 * PARAMETERS :
4624 *
4625 * RETURN :
4626 *
4627 *==========================================================================*/
4628FrameNumberRegistry::~FrameNumberRegistry()
4629{
4630}
4631
4632/*===========================================================================
4633 * FUNCTION : PurgeOldEntriesLocked
4634 *
4635 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
4636 *
4637 * PARAMETERS :
4638 *
4639 * RETURN : NONE
4640 *
4641 *==========================================================================*/
4642void FrameNumberRegistry::purgeOldEntriesLocked()
4643{
4644 while (_register.begin() != _register.end()) {
4645 auto itr = _register.begin();
4646 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4647 _register.erase(itr);
4648 } else {
4649 return;
4650 }
4651 }
4652}
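
/* Illustrative LRU behavior (based on the code above): with up to
 * FRAME_REGISTER_LRU_SIZE entries retained, any registry entry whose internal
 * frame number falls below _nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE
 * is erased the next time an alloc/generate/get call purges old entries. */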
4653
4654/*===========================================================================
4655 * FUNCTION : allocStoreInternalFrameNumber
4656 *
4657 * DESCRIPTION: Method to note down a framework request and associate a new
4658 * internal request number against it
4659 *
4660 * PARAMETERS :
4661 *   @frameworkFrameNumber: Identifier given by the framework
4662 *   @internalFrameNumber : Output parameter which will hold the newly
4663 *                          generated internal frame number
4664 *
4665 * RETURN : Error code
4666 *
4667 *==========================================================================*/
4668int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4669 uint32_t &internalFrameNumber)
4670{
4671 Mutex::Autolock lock(mRegistryLock);
4672 internalFrameNumber = _nextFreeInternalNumber++;
4673 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4674 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4675 purgeOldEntriesLocked();
4676 return NO_ERROR;
4677}
4678
4679/*===========================================================================
4680 * FUNCTION : generateStoreInternalFrameNumber
4681 *
4682 * DESCRIPTION: Method to associate a new internal request number independent
4683 *              of any association with framework requests
4684 *
4685 * PARAMETERS :
4686 *   @internalFrameNumber: Output parameter which will hold the newly generated
4687 *                         internal frame number
4688 *
4689 * RETURN : Error code
4690 *
4691 *==========================================================================*/
4692int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4693{
4694 Mutex::Autolock lock(mRegistryLock);
4695 internalFrameNumber = _nextFreeInternalNumber++;
4696 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4697 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4698 purgeOldEntriesLocked();
4699 return NO_ERROR;
4700}
4701
4702/*===========================================================================
4703 * FUNCTION : getFrameworkFrameNumber
4704 *
4705 * DESCRIPTION: Method to query the framework framenumber given an internal #
4706 *
4707 * PARAMETERS :
4708 *   @internalFrameNumber  : Internal frame number reference
4709 *   @frameworkFrameNumber : Output parameter holding the framework frame number
4710 *
4711 * RETURN : Error code
4712 *
4713 *==========================================================================*/
4714int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4715 uint32_t &frameworkFrameNumber)
4716{
4717 Mutex::Autolock lock(mRegistryLock);
4718 auto itr = _register.find(internalFrameNumber);
4719 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004720 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004721 return -ENOENT;
4722 }
4723
4724 frameworkFrameNumber = itr->second;
4725 purgeOldEntriesLocked();
4726 return NO_ERROR;
4727}
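
/* Usage sketch for FrameNumberRegistry (illustrative only; the local variable
 * names are hypothetical):
 *
 *     FrameNumberRegistry registry;
 *     uint32_t internalFrameNumber = 0;
 *     registry.allocStoreInternalFrameNumber(42, internalFrameNumber);
 *
 *     uint32_t frameworkFrameNumber = 0;
 *     if (registry.getFrameworkFrameNumber(internalFrameNumber,
 *             frameworkFrameNumber) == NO_ERROR) {
 *         // frameworkFrameNumber is now 42
 *     }
 */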
Thierry Strudel3d639192016-09-09 11:52:26 -07004728
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004729status_t QCamera3HardwareInterface::fillPbStreamConfig(
4730 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4731 QCamera3Channel *channel, uint32_t streamIndex) {
4732 if (config == nullptr) {
4733 LOGE("%s: config is null", __FUNCTION__);
4734 return BAD_VALUE;
4735 }
4736
4737 if (channel == nullptr) {
4738 LOGE("%s: channel is null", __FUNCTION__);
4739 return BAD_VALUE;
4740 }
4741
4742 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4743 if (stream == nullptr) {
4744 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4745 return NAME_NOT_FOUND;
4746 }
4747
4748 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4749 if (streamInfo == nullptr) {
4750 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4751 return NAME_NOT_FOUND;
4752 }
4753
4754 config->id = pbStreamId;
4755 config->image.width = streamInfo->dim.width;
4756 config->image.height = streamInfo->dim.height;
4757 config->image.padding = 0;
4758 config->image.format = pbStreamFormat;
4759
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004760 uint32_t totalPlaneSize = 0;
4761
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004762 // Fill plane information.
4763 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4764 pbcamera::PlaneConfiguration plane;
4765 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4766 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4767 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004768
4769 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004770 }
4771
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004772 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004773 return OK;
4774}
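
/* Worked example (hypothetical numbers) for the padding computed above with a
 * single-plane stream: stride = 4096, scanline = 3072, frame_len = 12681216.
 *     totalPlaneSize        = 4096 * 3072         = 12582912
 *     config->image.padding = 12681216 - 12582912 = 98304
 */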
4775
Thierry Strudel3d639192016-09-09 11:52:26 -07004776/*===========================================================================
4777 * FUNCTION : processCaptureRequest
4778 *
4779 * DESCRIPTION: process a capture request from camera service
4780 *
4781 * PARAMETERS :
4782 * @request : request from framework to process
4783 *
4784 * RETURN :
4785 *
4786 *==========================================================================*/
4787int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004788 camera3_capture_request_t *request,
4789 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004790{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004791 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004792 int rc = NO_ERROR;
4793 int32_t request_id;
4794 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 bool isVidBufRequested = false;
4796 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004797 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004798
4799 pthread_mutex_lock(&mMutex);
4800
4801 // Validate current state
4802 switch (mState) {
4803 case CONFIGURED:
4804 case STARTED:
4805 /* valid state */
4806 break;
4807
4808 case ERROR:
4809 pthread_mutex_unlock(&mMutex);
4810 handleCameraDeviceError();
4811 return -ENODEV;
4812
4813 default:
4814 LOGE("Invalid state %d", mState);
4815 pthread_mutex_unlock(&mMutex);
4816 return -ENODEV;
4817 }
4818
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004819 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 if (rc != NO_ERROR) {
4821 LOGE("incoming request is not valid");
4822 pthread_mutex_unlock(&mMutex);
4823 return rc;
4824 }
4825
4826 meta = request->settings;
4827
4828 // For first capture request, send capture intent, and
4829 // stream on all streams
4830 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004831 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 // send an unconfigure to the backend so that the isp
4833 // resources are deallocated
4834 if (!mFirstConfiguration) {
4835 cam_stream_size_info_t stream_config_info;
4836 int32_t hal_version = CAM_HAL_V3;
4837 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4838 stream_config_info.buffer_info.min_buffers =
4839 MIN_INFLIGHT_REQUESTS;
4840 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004841 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004842 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004843 clear_metadata_buffer(mParameters);
4844 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4845 CAM_INTF_PARM_HAL_VERSION, hal_version);
4846 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4847 CAM_INTF_META_STREAM_INFO, stream_config_info);
4848 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4849 mParameters);
4850 if (rc < 0) {
4851 LOGE("set_parms for unconfigure failed");
4852 pthread_mutex_unlock(&mMutex);
4853 return rc;
4854 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004855
Thierry Strudel3d639192016-09-09 11:52:26 -07004856 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004857 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004858 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004859 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004860 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004861 property_get("persist.camera.is_type", is_type_value, "4");
4862 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4863 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4864 property_get("persist.camera.is_type_preview", is_type_value, "4");
4865 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4866 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004867
4868 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4869 int32_t hal_version = CAM_HAL_V3;
4870 uint8_t captureIntent =
4871 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4872 mCaptureIntent = captureIntent;
4873 clear_metadata_buffer(mParameters);
4874 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4875 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4876 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004877 if (mFirstConfiguration) {
4878 // configure instant AEC
4879 // Instant AEC is a session based parameter and it is needed only
4880 // once per complete session after open camera.
4881 // i.e. This is set only once for the first capture request, after open camera.
4882 setInstantAEC(meta);
4883 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004884 uint8_t fwkVideoStabMode=0;
4885 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4886 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4887 }
4888
Xue Tuecac74e2017-04-17 13:58:15 -07004889        // If the EIS setprop is enabled, turn EIS on only for video/preview
4890 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004891 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 int32_t vsMode;
4893 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4895 rc = BAD_VALUE;
4896 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004897 LOGD("setEis %d", setEis);
4898 bool eis3Supported = false;
4899 size_t count = IS_TYPE_MAX;
4900 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4901 for (size_t i = 0; i < count; i++) {
4902 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4903 eis3Supported = true;
4904 break;
4905 }
4906 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004907
4908 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004909 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004910 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4911 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4913 is_type = isTypePreview;
4914 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4915 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4916 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004918 } else {
4919 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004921 } else {
4922 is_type = IS_TYPE_NONE;
4923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004924 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004925 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004926 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4927 }
4928 }
4929
4930 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4931 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4932
Thierry Strudel54dc9782017-02-15 12:12:10 -08004933 //Disable tintless only if the property is set to 0
4934 memset(prop, 0, sizeof(prop));
4935 property_get("persist.camera.tintless.enable", prop, "1");
4936 int32_t tintless_value = atoi(prop);
4937
Thierry Strudel3d639192016-09-09 11:52:26 -07004938 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4939 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004940
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 //Disable CDS for HFR mode or if DIS/EIS is on.
4942 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4943 //after every configure_stream
4944 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4945 (m_bIsVideo)) {
4946 int32_t cds = CAM_CDS_MODE_OFF;
4947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4948 CAM_INTF_PARM_CDS_MODE, cds))
4949 LOGE("Failed to disable CDS for HFR mode");
4950
4951 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004952
4953 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4954 uint8_t* use_av_timer = NULL;
4955
4956 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004957 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 use_av_timer = &m_debug_avtimer;
4959 }
4960 else{
4961 use_av_timer =
4962 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004963 if (use_av_timer) {
4964 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4965 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004966 }
4967
4968            if ((use_av_timer != NULL) && ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4969 rc = BAD_VALUE;
4970 }
4971 }
4972
Thierry Strudel3d639192016-09-09 11:52:26 -07004973 setMobicat();
4974
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004975 uint8_t nrMode = 0;
4976 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4977 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4978 }
4979
Thierry Strudel3d639192016-09-09 11:52:26 -07004980 /* Set fps and hfr mode while sending meta stream info so that sensor
4981 * can configure appropriate streaming mode */
4982 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4984 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004985 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4986 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004987 if (rc == NO_ERROR) {
4988 int32_t max_fps =
4989 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004990 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004991 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4992 }
4993 /* For HFR, more buffers are dequeued upfront to improve the performance */
4994 if (mBatchSize) {
4995 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4996 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4997 }
4998 }
4999 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 LOGE("setHalFpsRange failed");
5001 }
5002 }
5003 if (meta.exists(ANDROID_CONTROL_MODE)) {
5004 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5005 rc = extractSceneMode(meta, metaMode, mParameters);
5006 if (rc != NO_ERROR) {
5007 LOGE("extractSceneMode failed");
5008 }
5009 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005010 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005011
Thierry Strudel04e026f2016-10-10 11:27:36 -07005012 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5013 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5014 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5015 rc = setVideoHdrMode(mParameters, vhdr);
5016 if (rc != NO_ERROR) {
5017 LOGE("setVideoHDR is failed");
5018 }
5019 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005020
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005021 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005022 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005023 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005024 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5026 sensorModeFullFov)) {
5027 rc = BAD_VALUE;
5028 }
5029 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005030 //TODO: validate the arguments, HSV scenemode should have only the
5031 //advertised fps ranges
5032
5033        /*set the capture intent, hal version, tintless, stream info,
5034         *and DIS enable parameters to the backend*/
5035 LOGD("set_parms META_STREAM_INFO " );
5036 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005037 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5038 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005039 mStreamConfigInfo.type[i],
5040 mStreamConfigInfo.stream_sizes[i].width,
5041 mStreamConfigInfo.stream_sizes[i].height,
5042 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005043 mStreamConfigInfo.format[i],
5044 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005045 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005046
Thierry Strudel3d639192016-09-09 11:52:26 -07005047 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5048 mParameters);
5049 if (rc < 0) {
5050 LOGE("set_parms failed for hal version, stream info");
5051 }
5052
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005053 cam_sensor_mode_info_t sensorModeInfo = {};
5054 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005055 if (rc != NO_ERROR) {
5056 LOGE("Failed to get sensor output size");
5057 pthread_mutex_unlock(&mMutex);
5058 goto error_exit;
5059 }
5060
5061 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5062 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005063 sensorModeInfo.active_array_size.width,
5064 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005065
5066 /* Set batchmode before initializing channel. Since registerBuffer
5067 * internally initializes some of the channels, better set batchmode
5068 * even before first register buffer */
5069 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5070 it != mStreamInfo.end(); it++) {
5071 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5072 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5073 && mBatchSize) {
5074 rc = channel->setBatchSize(mBatchSize);
5075 //Disable per frame map unmap for HFR/batchmode case
5076 rc |= channel->setPerFrameMapUnmap(false);
5077 if (NO_ERROR != rc) {
5078 LOGE("Channel init failed %d", rc);
5079 pthread_mutex_unlock(&mMutex);
5080 goto error_exit;
5081 }
5082 }
5083 }
5084
5085 //First initialize all streams
5086 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5087 it != mStreamInfo.end(); it++) {
5088 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005089
5090 /* Initial value of NR mode is needed before stream on */
5091 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005092 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5093 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005094 setEis) {
5095 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5096 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5097 is_type = mStreamConfigInfo.is_type[i];
5098 break;
5099 }
5100 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005101 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005102 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 rc = channel->initialize(IS_TYPE_NONE);
5104 }
5105 if (NO_ERROR != rc) {
5106 LOGE("Channel initialization failed %d", rc);
5107 pthread_mutex_unlock(&mMutex);
5108 goto error_exit;
5109 }
5110 }
5111
5112 if (mRawDumpChannel) {
5113 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5114 if (rc != NO_ERROR) {
5115 LOGE("Error: Raw Dump Channel init failed");
5116 pthread_mutex_unlock(&mMutex);
5117 goto error_exit;
5118 }
5119 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005120 if (mHdrPlusRawSrcChannel) {
5121 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5122 if (rc != NO_ERROR) {
5123 LOGE("Error: HDR+ RAW Source Channel init failed");
5124 pthread_mutex_unlock(&mMutex);
5125 goto error_exit;
5126 }
5127 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005128 if (mSupportChannel) {
5129 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5130 if (rc < 0) {
5131 LOGE("Support channel initialization failed");
5132 pthread_mutex_unlock(&mMutex);
5133 goto error_exit;
5134 }
5135 }
5136 if (mAnalysisChannel) {
5137 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5138 if (rc < 0) {
5139 LOGE("Analysis channel initialization failed");
5140 pthread_mutex_unlock(&mMutex);
5141 goto error_exit;
5142 }
5143 }
5144 if (mDummyBatchChannel) {
5145 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5146 if (rc < 0) {
5147 LOGE("mDummyBatchChannel setBatchSize failed");
5148 pthread_mutex_unlock(&mMutex);
5149 goto error_exit;
5150 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005151 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005152 if (rc < 0) {
5153 LOGE("mDummyBatchChannel initialization failed");
5154 pthread_mutex_unlock(&mMutex);
5155 goto error_exit;
5156 }
5157 }
5158
5159 // Set bundle info
5160 rc = setBundleInfo();
5161 if (rc < 0) {
5162 LOGE("setBundleInfo failed %d", rc);
5163 pthread_mutex_unlock(&mMutex);
5164 goto error_exit;
5165 }
5166
5167 //update settings from app here
5168 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5169 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5170 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5171 }
5172 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5173 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5174 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5175 }
5176 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5177 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5178 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5179
5180 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5181 (mLinkedCameraId != mCameraId) ) {
5182 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5183 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005184 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 goto error_exit;
5186 }
5187 }
5188
5189 // add bundle related cameras
5190 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5191 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005192 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5193 &m_pDualCamCmdPtr->bundle_info;
5194 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005195 if (mIsDeviceLinked)
5196 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5197 else
5198 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5199
5200 pthread_mutex_lock(&gCamLock);
5201
5202 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5203 LOGE("Dualcam: Invalid Session Id ");
5204 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005205 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005206 goto error_exit;
5207 }
5208
5209 if (mIsMainCamera == 1) {
5210 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5211 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005212 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005213 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 // related session id should be session id of linked session
5215 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5216 } else {
5217 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5218 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005219 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005220 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005221 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5222 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005223 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 pthread_mutex_unlock(&gCamLock);
5225
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005226 rc = mCameraHandle->ops->set_dual_cam_cmd(
5227 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 if (rc < 0) {
5229 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005230 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 goto error_exit;
5232 }
5233 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 goto no_error;
5235error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005236 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005237 return rc;
5238no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005239 mWokenUpByDaemon = false;
5240 mPendingLiveRequest = 0;
5241 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 }
5243
5244 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005245 cam_stream_ID_t streamsArray;
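    // Accumulates the stream IDs (and buffer indices) touched by this capture
    // request; it is handed to the backend as CAM_INTF_META_STREAM_ID.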
Thierry Strudel3d639192016-09-09 11:52:26 -07005246
5247 if (mFlushPerf) {
5248 //we cannot accept any requests during flush
5249 LOGE("process_capture_request cannot proceed during flush");
5250 pthread_mutex_unlock(&mMutex);
5251 return NO_ERROR; //should return an error
5252 }
5253
5254 if (meta.exists(ANDROID_REQUEST_ID)) {
5255 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5256 mCurrentRequestId = request_id;
5257 LOGD("Received request with id: %d", request_id);
5258 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5259        LOGE("Unable to find request id field, "
5260                "& no previous id available");
5261 pthread_mutex_unlock(&mMutex);
5262 return NAME_NOT_FOUND;
5263 } else {
5264 LOGD("Re-using old request id");
5265 request_id = mCurrentRequestId;
5266 }
5267
5268 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5269 request->num_output_buffers,
5270 request->input_buffer,
5271 frameNumber);
5272 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005273 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005274 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005275 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005276 uint32_t snapshotStreamId = 0;
5277 for (size_t i = 0; i < request->num_output_buffers; i++) {
5278 const camera3_stream_buffer_t& output = request->output_buffers[i];
5279 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5280
Emilian Peev7650c122017-01-19 08:24:33 -08005281 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5282 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005283            //FIXME??: Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 blob_request = 1;
5285 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5286 }
5287
5288 if (output.acquire_fence != -1) {
5289 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5290 close(output.acquire_fence);
5291 if (rc != OK) {
5292 LOGE("sync wait failed %d", rc);
5293 pthread_mutex_unlock(&mMutex);
5294 return rc;
5295 }
5296 }
5297
Emilian Peev0f3c3162017-03-15 12:57:46 +00005298 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5299 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005300 depthRequestPresent = true;
5301 continue;
5302 }
5303
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005304 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005306
5307 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5308 isVidBufRequested = true;
5309 }
5310 }
5311
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005312    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5313 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5314 itr++) {
5315 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5316 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5317 channel->getStreamID(channel->getStreamTypeMask());
5318
5319 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5320 isVidBufRequested = true;
5321 }
5322 }
5323
Thierry Strudel3d639192016-09-09 11:52:26 -07005324 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005325 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005326 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 }
5328 if (blob_request && mRawDumpChannel) {
5329 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005330 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005331 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005332 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005333 }
5334
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005335 {
5336 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5337 // Request a RAW buffer if
5338 // 1. mHdrPlusRawSrcChannel is valid.
5339        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate.)
5340 // 3. There is no pending HDR+ request.
5341 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5342 mHdrPlusPendingRequests.size() == 0) {
5343 streamsArray.stream_request[streamsArray.num_streams].streamID =
5344 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5345 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5346 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005347 }
5348
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005349 //extract capture intent
5350 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5351 mCaptureIntent =
5352 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5353 }
5354
5355 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5356 mCacMode =
5357 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5358 }
5359
5360 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005361 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005362
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005363 {
5364 Mutex::Autolock l(gHdrPlusClientLock);
5365 // If this request has a still capture intent, try to submit an HDR+ request.
5366 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5367 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5368 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5369 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005370 }
5371
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005372 if (hdrPlusRequest) {
5373 // For a HDR+ request, just set the frame parameters.
5374 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5375 if (rc < 0) {
5376 LOGE("fail to set frame parameters");
5377 pthread_mutex_unlock(&mMutex);
5378 return rc;
5379 }
5380 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005381 /* Parse the settings:
5382 * - For every request in NORMAL MODE
5383 * - For every request in HFR mode during preview only case
5384 * - For first request of every batch in HFR mode during video
5385         * recording. In batch mode the same settings, except the frame
5386         * number, are repeated in each request of the batch.
5387 */
5388 if (!mBatchSize ||
5389 (mBatchSize && !isVidBufRequested) ||
5390 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005391 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005392 if (rc < 0) {
5393 LOGE("fail to set frame parameters");
5394 pthread_mutex_unlock(&mMutex);
5395 return rc;
5396 }
5397 }
5398 /* For batchMode HFR, setFrameParameters is not called for every
5399         * request; only the frame number of the latest request is parsed.
5400         * Keep track of the first and last frame numbers in a batch so that
5401         * metadata for the frame numbers of the batch can be duplicated in
5402         * handleBatchMetadata */
5403 if (mBatchSize) {
5404 if (!mToBeQueuedVidBufs) {
5405 //start of the batch
5406 mFirstFrameNumberInBatch = request->frame_number;
5407 }
5408 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5409 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5410 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005411 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005412 return BAD_VALUE;
5413 }
5414 }
5415 if (mNeedSensorRestart) {
5416 /* Unlock the mutex as restartSensor waits on the channels to be
5417 * stopped, which in turn calls stream callback functions -
5418 * handleBufferWithLock and handleMetadataWithLock */
5419 pthread_mutex_unlock(&mMutex);
5420 rc = dynamicUpdateMetaStreamInfo();
5421 if (rc != NO_ERROR) {
5422 LOGE("Restarting the sensor failed");
5423 return BAD_VALUE;
5424 }
5425 mNeedSensorRestart = false;
5426 pthread_mutex_lock(&mMutex);
5427 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005428 if(mResetInstantAEC) {
5429 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5430 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5431 mResetInstantAEC = false;
5432 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005433 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005434 if (request->input_buffer->acquire_fence != -1) {
5435 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5436 close(request->input_buffer->acquire_fence);
5437 if (rc != OK) {
5438 LOGE("input buffer sync wait failed %d", rc);
5439 pthread_mutex_unlock(&mMutex);
5440 return rc;
5441 }
5442 }
5443 }
5444
5445 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5446 mLastCustIntentFrmNum = frameNumber;
5447 }
5448 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005449 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005450 pendingRequestIterator latestRequest;
5451 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005452 pendingRequest.num_buffers = depthRequestPresent ?
5453 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005454 pendingRequest.request_id = request_id;
5455 pendingRequest.blob_request = blob_request;
5456 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005457 if (request->input_buffer) {
5458 pendingRequest.input_buffer =
5459 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5460 *(pendingRequest.input_buffer) = *(request->input_buffer);
5461 pInputBuffer = pendingRequest.input_buffer;
5462 } else {
5463 pendingRequest.input_buffer = NULL;
5464 pInputBuffer = NULL;
5465 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005466 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005467
5468 pendingRequest.pipeline_depth = 0;
5469 pendingRequest.partial_result_cnt = 0;
5470 extractJpegMetadata(mCurJpegMeta, request);
5471 pendingRequest.jpegMetadata = mCurJpegMeta;
5472 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005473 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005474 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5475 mHybridAeEnable =
5476 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5477 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005478
5479 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5480 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005481 /* DevCamDebug metadata processCaptureRequest */
5482 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5483 mDevCamDebugMetaEnable =
5484 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5485 }
5486 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5487 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005488
5489 //extract CAC info
5490 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5491 mCacMode =
5492 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5493 }
5494 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005495 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005496 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5497 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005498
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005499 // extract enableZsl info
5500 if (gExposeEnableZslKey) {
5501 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5502 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5503 mZslEnabled = pendingRequest.enableZsl;
5504 } else {
5505 pendingRequest.enableZsl = mZslEnabled;
5506 }
5507 }
5508
Thierry Strudel3d639192016-09-09 11:52:26 -07005509 PendingBuffersInRequest bufsForCurRequest;
5510 bufsForCurRequest.frame_number = frameNumber;
5511 // Mark current timestamp for the new request
5512 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005513 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005514
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005515 if (hdrPlusRequest) {
5516 // Save settings for this request.
5517 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5518 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5519
5520 // Add to pending HDR+ request queue.
5521 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5522 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5523
5524 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5525 }
5526
Thierry Strudel3d639192016-09-09 11:52:26 -07005527 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005528 if ((request->output_buffers[i].stream->data_space ==
5529 HAL_DATASPACE_DEPTH) &&
5530 (HAL_PIXEL_FORMAT_BLOB ==
5531 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005532 continue;
5533 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005534 RequestedBufferInfo requestedBuf;
5535 memset(&requestedBuf, 0, sizeof(requestedBuf));
5536 requestedBuf.stream = request->output_buffers[i].stream;
5537 requestedBuf.buffer = NULL;
5538 pendingRequest.buffers.push_back(requestedBuf);
5539
5540 // Add to buffer handle the pending buffers list
5541 PendingBufferInfo bufferInfo;
5542 bufferInfo.buffer = request->output_buffers[i].buffer;
5543 bufferInfo.stream = request->output_buffers[i].stream;
5544 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5545 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5546 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5547 frameNumber, bufferInfo.buffer,
5548 channel->getStreamTypeMask(), bufferInfo.stream->format);
5549 }
5550 // Add this request packet into mPendingBuffersMap
5551 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5552 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5553 mPendingBuffersMap.get_num_overall_buffers());
5554
5555 latestRequest = mPendingRequestsList.insert(
5556 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005557
5558 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5559 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005560 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005561 for (size_t i = 0; i < request->num_output_buffers; i++) {
5562 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5563 }
5564
Thierry Strudel3d639192016-09-09 11:52:26 -07005565 if(mFlush) {
5566 LOGI("mFlush is true");
5567 pthread_mutex_unlock(&mMutex);
5568 return NO_ERROR;
5569 }
5570
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005571 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5572 // channel.
5573 if (!hdrPlusRequest) {
5574 int indexUsed;
5575 // Notify metadata channel we receive a request
5576 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005577
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005578 if(request->input_buffer != NULL){
5579 LOGD("Input request, frame_number %d", frameNumber);
5580 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5581 if (NO_ERROR != rc) {
5582 LOGE("fail to set reproc parameters");
5583 pthread_mutex_unlock(&mMutex);
5584 return rc;
5585 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005586 }
5587
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005588 // Call request on other streams
5589 uint32_t streams_need_metadata = 0;
5590 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5591 for (size_t i = 0; i < request->num_output_buffers; i++) {
5592 const camera3_stream_buffer_t& output = request->output_buffers[i];
5593 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5594
5595 if (channel == NULL) {
5596 LOGW("invalid channel pointer for stream");
5597 continue;
5598 }
5599
5600 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5601 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5602 output.buffer, request->input_buffer, frameNumber);
5603 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005604 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005605 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5606 if (rc < 0) {
5607 LOGE("Fail to request on picture channel");
5608 pthread_mutex_unlock(&mMutex);
5609 return rc;
5610 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005611 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005612 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5613 assert(NULL != mDepthChannel);
5614 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005615
Emilian Peev7650c122017-01-19 08:24:33 -08005616 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5617 if (rc < 0) {
5618 LOGE("Fail to map on depth buffer");
5619 pthread_mutex_unlock(&mMutex);
5620 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005621 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005622 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005623 } else {
5624 LOGD("snapshot request with buffer %p, frame_number %d",
5625 output.buffer, frameNumber);
5626 if (!request->settings) {
5627 rc = channel->request(output.buffer, frameNumber,
5628 NULL, mPrevParameters, indexUsed);
5629 } else {
5630 rc = channel->request(output.buffer, frameNumber,
5631 NULL, mParameters, indexUsed);
5632 }
5633 if (rc < 0) {
5634 LOGE("Fail to request on picture channel");
5635 pthread_mutex_unlock(&mMutex);
5636 return rc;
5637 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005638
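                        // Record which buffer index the channel consumed so the
                        // backend request references the right buffer (free-run
                        // index in constrained high-speed mode).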
Emilian Peev7650c122017-01-19 08:24:33 -08005639 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5640 uint32_t j = 0;
5641 for (j = 0; j < streamsArray.num_streams; j++) {
5642 if (streamsArray.stream_request[j].streamID == streamId) {
5643 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5644 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5645 else
5646 streamsArray.stream_request[j].buf_index = indexUsed;
5647 break;
5648 }
5649 }
5650 if (j == streamsArray.num_streams) {
5651 LOGE("Did not find matching stream to update index");
5652 assert(0);
5653 }
5654
5655 pendingBufferIter->need_metadata = true;
5656 streams_need_metadata++;
5657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005658 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005659 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5660 bool needMetadata = false;
5661 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5662 rc = yuvChannel->request(output.buffer, frameNumber,
5663 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5664 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005665 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005666 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005667 pthread_mutex_unlock(&mMutex);
5668 return rc;
5669 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005670
5671 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5672 uint32_t j = 0;
5673 for (j = 0; j < streamsArray.num_streams; j++) {
5674 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005675 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5676 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5677 else
5678 streamsArray.stream_request[j].buf_index = indexUsed;
5679 break;
5680 }
5681 }
5682 if (j == streamsArray.num_streams) {
5683 LOGE("Did not find matching stream to update index");
5684 assert(0);
5685 }
5686
5687 pendingBufferIter->need_metadata = needMetadata;
5688 if (needMetadata)
5689 streams_need_metadata += 1;
5690 LOGD("calling YUV channel request, need_metadata is %d",
5691 needMetadata);
5692 } else {
5693 LOGD("request with buffer %p, frame_number %d",
5694 output.buffer, frameNumber);
5695
5696 rc = channel->request(output.buffer, frameNumber, indexUsed);
5697
5698 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5699 uint32_t j = 0;
5700 for (j = 0; j < streamsArray.num_streams; j++) {
5701 if (streamsArray.stream_request[j].streamID == streamId) {
5702 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5703 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5704 else
5705 streamsArray.stream_request[j].buf_index = indexUsed;
5706 break;
5707 }
5708 }
5709 if (j == streamsArray.num_streams) {
5710 LOGE("Did not find matching stream to update index");
5711 assert(0);
5712 }
5713
5714 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5715 && mBatchSize) {
5716 mToBeQueuedVidBufs++;
5717 if (mToBeQueuedVidBufs == mBatchSize) {
5718 channel->queueBatchBuf();
5719 }
5720 }
5721 if (rc < 0) {
5722 LOGE("request failed");
5723 pthread_mutex_unlock(&mMutex);
5724 return rc;
5725 }
5726 }
5727 pendingBufferIter++;
5728 }
5729
5730 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5731 itr++) {
5732 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5733
5734 if (channel == NULL) {
5735 LOGE("invalid channel pointer for stream");
5736 assert(0);
5737 return BAD_VALUE;
5738 }
5739
5740 InternalRequest requestedStream;
5741 requestedStream = (*itr);
5742
5743
5744 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5745 LOGD("snapshot request internally input buffer %p, frame_number %d",
5746 request->input_buffer, frameNumber);
5747 if(request->input_buffer != NULL){
5748 rc = channel->request(NULL, frameNumber,
5749 pInputBuffer, &mReprocMeta, indexUsed, true,
5750 requestedStream.meteringOnly);
5751 if (rc < 0) {
5752 LOGE("Fail to request on picture channel");
5753 pthread_mutex_unlock(&mMutex);
5754 return rc;
5755 }
5756 } else {
5757 LOGD("snapshot request with frame_number %d", frameNumber);
5758 if (!request->settings) {
5759 rc = channel->request(NULL, frameNumber,
5760 NULL, mPrevParameters, indexUsed, true,
5761 requestedStream.meteringOnly);
5762 } else {
5763 rc = channel->request(NULL, frameNumber,
5764 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5765 }
5766 if (rc < 0) {
5767 LOGE("Fail to request on picture channel");
5768 pthread_mutex_unlock(&mMutex);
5769 return rc;
5770 }
5771
5772 if ((*itr).meteringOnly != 1) {
5773 requestedStream.need_metadata = 1;
5774 streams_need_metadata++;
5775 }
5776 }
5777
5778 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5779 uint32_t j = 0;
5780 for (j = 0; j < streamsArray.num_streams; j++) {
5781 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005782 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5783 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5784 else
5785 streamsArray.stream_request[j].buf_index = indexUsed;
5786 break;
5787 }
5788 }
5789 if (j == streamsArray.num_streams) {
5790 LOGE("Did not find matching stream to update index");
5791 assert(0);
5792 }
5793
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005794 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005795 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005796 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005797 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005798 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005799 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005800 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005801
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005802 //If 2 streams have need_metadata set to true, fail the request, unless
5803 //we copy/reference count the metadata buffer
5804 if (streams_need_metadata > 1) {
5805            LOGE("not supporting request in which two streams require"
5806 " 2 HAL metadata for reprocessing");
5807 pthread_mutex_unlock(&mMutex);
5808 return -EINVAL;
5809 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005810
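    // Decide whether PDAF data should be produced for this request: skip it by
    // default when a depth channel exists, honor an explicit PD-enable setting
    // carried by the request, and otherwise reuse the last requested mode.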
Emilian Peev656e4fa2017-06-02 16:47:04 +01005811 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5812 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5813 if (depthRequestPresent && mDepthChannel) {
5814 if (request->settings) {
5815 camera_metadata_ro_entry entry;
5816 if (find_camera_metadata_ro_entry(request->settings,
5817 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5818 if (entry.data.u8[0]) {
5819 pdafEnable = CAM_PD_DATA_ENABLED;
5820 } else {
5821 pdafEnable = CAM_PD_DATA_SKIP;
5822 }
5823 mDepthCloudMode = pdafEnable;
5824 } else {
5825 pdafEnable = mDepthCloudMode;
5826 }
5827 } else {
5828 pdafEnable = mDepthCloudMode;
5829 }
5830 }
5831
Emilian Peev7650c122017-01-19 08:24:33 -08005832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5833 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5834 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5835 pthread_mutex_unlock(&mMutex);
5836 return BAD_VALUE;
5837 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005838
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005839 if (request->input_buffer == NULL) {
5840 /* Set the parameters to backend:
5841 * - For every request in NORMAL MODE
5842 * - For every request in HFR mode during preview only case
5843 * - Once every batch in HFR mode during video recording
5844 */
5845 if (!mBatchSize ||
5846 (mBatchSize && !isVidBufRequested) ||
5847 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5848 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5849 mBatchSize, isVidBufRequested,
5850 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005851
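            // Batch complete: merge this request's stream IDs into the accumulated
            // batch list (without duplicates) so one set_parms call covers every
            // stream requested during the batch.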
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005852 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5853 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5854 uint32_t m = 0;
5855 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5856 if (streamsArray.stream_request[k].streamID ==
5857 mBatchedStreamsArray.stream_request[m].streamID)
5858 break;
5859 }
5860 if (m == mBatchedStreamsArray.num_streams) {
5861 mBatchedStreamsArray.stream_request\
5862 [mBatchedStreamsArray.num_streams].streamID =
5863 streamsArray.stream_request[k].streamID;
5864 mBatchedStreamsArray.stream_request\
5865 [mBatchedStreamsArray.num_streams].buf_index =
5866 streamsArray.stream_request[k].buf_index;
5867 mBatchedStreamsArray.num_streams =
5868 mBatchedStreamsArray.num_streams + 1;
5869 }
5870 }
5871 streamsArray = mBatchedStreamsArray;
5872 }
5873 /* Update stream id of all the requested buffers */
5874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5875 streamsArray)) {
5876 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005877 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005878 return BAD_VALUE;
5879 }
5880
5881 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5882 mParameters);
5883 if (rc < 0) {
5884 LOGE("set_parms failed");
5885 }
5886            /* reset to zero because the batch is queued */
5887 mToBeQueuedVidBufs = 0;
5888 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5889 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5890 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
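            // Batch not yet complete: only accumulate this request's stream IDs;
            // set_parms is deferred until the whole batch has been queued.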
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005891 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5892 uint32_t m = 0;
5893 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5894 if (streamsArray.stream_request[k].streamID ==
5895 mBatchedStreamsArray.stream_request[m].streamID)
5896 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005897 }
5898 if (m == mBatchedStreamsArray.num_streams) {
5899 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5900 streamID = streamsArray.stream_request[k].streamID;
5901 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5902 buf_index = streamsArray.stream_request[k].buf_index;
5903 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5904 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005905 }
5906 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005907 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005908
5909 // Start all streams after the first setting is sent, so that the
5910 // setting can be applied sooner: (0 + apply_delay)th frame.
5911 if (mState == CONFIGURED && mChannelHandle) {
5912 //Then start them.
5913 LOGH("Start META Channel");
5914 rc = mMetadataChannel->start();
5915 if (rc < 0) {
5916 LOGE("META channel start failed");
5917 pthread_mutex_unlock(&mMutex);
5918 return rc;
5919 }
5920
5921 if (mAnalysisChannel) {
5922 rc = mAnalysisChannel->start();
5923 if (rc < 0) {
5924 LOGE("Analysis channel start failed");
5925 mMetadataChannel->stop();
5926 pthread_mutex_unlock(&mMutex);
5927 return rc;
5928 }
5929 }
5930
5931 if (mSupportChannel) {
5932 rc = mSupportChannel->start();
5933 if (rc < 0) {
5934 LOGE("Support channel start failed");
5935 mMetadataChannel->stop();
5936 /* Although support and analysis are mutually exclusive today
5937                       adding it in any case for future-proofing */
5938 if (mAnalysisChannel) {
5939 mAnalysisChannel->stop();
5940 }
5941 pthread_mutex_unlock(&mMutex);
5942 return rc;
5943 }
5944 }
5945 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5946 it != mStreamInfo.end(); it++) {
5947 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5948 LOGH("Start Processing Channel mask=%d",
5949 channel->getStreamTypeMask());
5950 rc = channel->start();
5951 if (rc < 0) {
5952 LOGE("channel start failed");
5953 pthread_mutex_unlock(&mMutex);
5954 return rc;
5955 }
5956 }
5957
5958 if (mRawDumpChannel) {
5959 LOGD("Starting raw dump stream");
5960 rc = mRawDumpChannel->start();
5961 if (rc != NO_ERROR) {
5962 LOGE("Error Starting Raw Dump Channel");
5963 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5964 it != mStreamInfo.end(); it++) {
5965 QCamera3Channel *channel =
5966 (QCamera3Channel *)(*it)->stream->priv;
5967 LOGH("Stopping Processing Channel mask=%d",
5968 channel->getStreamTypeMask());
5969 channel->stop();
5970 }
5971 if (mSupportChannel)
5972 mSupportChannel->stop();
5973 if (mAnalysisChannel) {
5974 mAnalysisChannel->stop();
5975 }
5976 mMetadataChannel->stop();
5977 pthread_mutex_unlock(&mMutex);
5978 return rc;
5979 }
5980 }
5981
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005982 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005983 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005984 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005985 if (rc != NO_ERROR) {
5986 LOGE("start_channel failed %d", rc);
5987 pthread_mutex_unlock(&mMutex);
5988 return rc;
5989 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005990
5991 {
5992 // Configure Easel for stream on.
5993 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005994
5995 // Now that sensor mode should have been selected, get the selected sensor mode
5996 // info.
5997 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5998 getCurrentSensorModeInfo(mSensorModeInfo);
5999
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006000 if (EaselManagerClientOpened) {
6001 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006002 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6003 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006004 if (rc != OK) {
6005 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6006 mCameraId, mSensorModeInfo.op_pixel_clk);
6007 pthread_mutex_unlock(&mMutex);
6008 return rc;
6009 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006010 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006011 }
6012 }
6013
6014 // Start sensor streaming.
6015 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6016 mChannelHandle);
6017 if (rc != NO_ERROR) {
6018 LOGE("start_sensor_stream_on failed %d", rc);
6019 pthread_mutex_unlock(&mMutex);
6020 return rc;
6021 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006022 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006023 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006024 }
6025
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006026 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07006027 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006028 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006029 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006030 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6031 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6032 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6033 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6034 rc = enableHdrPlusModeLocked();
6035 if (rc != OK) {
6036 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6037 pthread_mutex_unlock(&mMutex);
6038 return rc;
6039 }
6040
6041 mFirstPreviewIntentSeen = true;
6042 }
6043 }
6044
Thierry Strudel3d639192016-09-09 11:52:26 -07006045 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6046
6047 mState = STARTED;
6048    // Use a timed condition wait so a stalled backend cannot block us forever
6049 struct timespec ts;
6050 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006051 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006052 if (rc < 0) {
6053 isValidTimeout = 0;
6054        LOGE("Error reading the monotonic clock!!");
6055 }
6056 else {
6057        // Allow 5 sec for the request to be honored before timing out
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006058 int64_t timeout = 5;
6059 {
6060 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6061 // If there is a pending HDR+ request, the following requests may be blocked until the
6062 // HDR+ request is done. So allow a longer timeout.
6063 if (mHdrPlusPendingRequests.size() > 0) {
6064 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6065 }
6066 }
6067 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006068 }
6069 //Block on conditional variable
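    // Throttle the framework: wait while the number of in-flight requests is at
    // or above mMinInFlightRequests (reprocess requests are exempt), or until the
    // timeout prepared above expires.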
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006070 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006071 (mState != ERROR) && (mState != DEINIT)) {
6072 if (!isValidTimeout) {
6073 LOGD("Blocking on conditional wait");
6074 pthread_cond_wait(&mRequestCond, &mMutex);
6075 }
6076 else {
6077 LOGD("Blocking on timed conditional wait");
6078 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6079 if (rc == ETIMEDOUT) {
6080 rc = -ENODEV;
6081 LOGE("Unblocked on timeout!!!!");
6082 break;
6083 }
6084 }
6085 LOGD("Unblocked");
6086 if (mWokenUpByDaemon) {
6087 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006088 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006089 break;
6090 }
6091 }
6092 pthread_mutex_unlock(&mMutex);
6093
6094 return rc;
6095}
6096
6097/*===========================================================================
6098 * FUNCTION : dump
6099 *
6100 * DESCRIPTION: Dumps HAL state (pending requests, pending buffers and the
6101 *              pending frame drop list) to the given file descriptor.
6102 * PARAMETERS :
6103 *   @fd : file descriptor to write the dump information to
6104 *
6105 * RETURN     : None
6106 *==========================================================================*/
6107void QCamera3HardwareInterface::dump(int fd)
6108{
6109 pthread_mutex_lock(&mMutex);
6110 dprintf(fd, "\n Camera HAL3 information Begin \n");
6111
6112 dprintf(fd, "\nNumber of pending requests: %zu \n",
6113 mPendingRequestsList.size());
6114 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6115 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6116 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6117 for(pendingRequestIterator i = mPendingRequestsList.begin();
6118 i != mPendingRequestsList.end(); i++) {
6119 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6120 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6121 i->input_buffer);
6122 }
6123 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6124 mPendingBuffersMap.get_num_overall_buffers());
6125 dprintf(fd, "-------+------------------\n");
6126 dprintf(fd, " Frame | Stream type mask \n");
6127 dprintf(fd, "-------+------------------\n");
6128 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6129 for(auto &j : req.mPendingBufferList) {
6130 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6131 dprintf(fd, " %5d | %11d \n",
6132 req.frame_number, channel->getStreamTypeMask());
6133 }
6134 }
6135 dprintf(fd, "-------+------------------\n");
6136
6137 dprintf(fd, "\nPending frame drop list: %zu\n",
6138 mPendingFrameDropList.size());
6139 dprintf(fd, "-------+-----------\n");
6140 dprintf(fd, " Frame | Stream ID \n");
6141 dprintf(fd, "-------+-----------\n");
6142 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6143 i != mPendingFrameDropList.end(); i++) {
6144 dprintf(fd, " %5d | %9d \n",
6145 i->frame_number, i->stream_ID);
6146 }
6147 dprintf(fd, "-------+-----------\n");
6148
6149 dprintf(fd, "\n Camera HAL3 information End \n");
6150
6151 /* use dumpsys media.camera as trigger to send update debug level event */
6152 mUpdateDebugLevel = true;
6153 pthread_mutex_unlock(&mMutex);
6154 return;
6155}
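
// Note: in a standard Android build this dump is typically reached through the
// camera service, e.g. "adb shell dumpsys media.camera"; as the comment above
// notes, that same trigger sets mUpdateDebugLevel so an updated debug level is
// picked up on the next request.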
6156
6157/*===========================================================================
6158 * FUNCTION : flush
6159 *
6160 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6161 * conditionally restarts channels
6162 *
6163 * PARAMETERS :
6164 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006165 * @ stopChannelImmediately: stop the channel immediately. This should be used
6166 *                           when the device has encountered an error and
6167 *                           MIPI may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006168 *
6169 * RETURN :
6170 * 0 on success
6171 * Error code on failure
6172 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006173int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006174{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006175 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006176 int32_t rc = NO_ERROR;
6177
6178 LOGD("Unblocking Process Capture Request");
6179 pthread_mutex_lock(&mMutex);
6180 mFlush = true;
6181 pthread_mutex_unlock(&mMutex);
6182
6183 rc = stopAllChannels();
6184 // unlink of dualcam
6185 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006186 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6187 &m_pDualCamCmdPtr->bundle_info;
6188 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006189 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6190 pthread_mutex_lock(&gCamLock);
6191
6192 if (mIsMainCamera == 1) {
6193 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6194 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006195 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006196 // related session id should be session id of linked session
6197 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6198 } else {
6199 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6200 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006201 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006202 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6203 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006204 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006205 pthread_mutex_unlock(&gCamLock);
6206
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006207 rc = mCameraHandle->ops->set_dual_cam_cmd(
6208 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006209 if (rc < 0) {
6210 LOGE("Dualcam: Unlink failed, but still proceed to close");
6211 }
6212 }
6213
6214 if (rc < 0) {
6215 LOGE("stopAllChannels failed");
6216 return rc;
6217 }
6218 if (mChannelHandle) {
6219 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006220 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006221 }
6222
6223 // Reset bundle info
6224 rc = setBundleInfo();
6225 if (rc < 0) {
6226 LOGE("setBundleInfo failed %d", rc);
6227 return rc;
6228 }
6229
6230 // Mutex Lock
6231 pthread_mutex_lock(&mMutex);
6232
6233 // Unblock process_capture_request
6234 mPendingLiveRequest = 0;
6235 pthread_cond_signal(&mRequestCond);
6236
6237 rc = notifyErrorForPendingRequests();
6238 if (rc < 0) {
6239 LOGE("notifyErrorForPendingRequests failed");
6240 pthread_mutex_unlock(&mMutex);
6241 return rc;
6242 }
6243
6244 mFlush = false;
6245
6246 // Start the Streams/Channels
6247 if (restartChannels) {
6248 rc = startAllChannels();
6249 if (rc < 0) {
6250 LOGE("startAllChannels failed");
6251 pthread_mutex_unlock(&mMutex);
6252 return rc;
6253 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006254 if (mChannelHandle) {
6255 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006256 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006257 if (rc < 0) {
6258 LOGE("start_channel failed");
6259 pthread_mutex_unlock(&mMutex);
6260 return rc;
6261 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006262 }
6263 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006264 pthread_mutex_unlock(&mMutex);
6265
6266 return 0;
6267}
6268
6269/*===========================================================================
6270 * FUNCTION : flushPerf
6271 *
6272 * DESCRIPTION: This is the performance optimization version of flush that does
6273 *              not use stream off; rather, it flushes the backend
6274 *
6275 * PARAMETERS :
6276 *
6277 *
6278 * RETURN : 0 : success
6279 * -EINVAL: input is malformed (device is not valid)
6280 * -ENODEV: if the device has encountered a serious error
6281 *==========================================================================*/
6282int QCamera3HardwareInterface::flushPerf()
6283{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006284 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006285 int32_t rc = 0;
6286 struct timespec timeout;
6287 bool timed_wait = false;
6288
6289 pthread_mutex_lock(&mMutex);
6290 mFlushPerf = true;
6291 mPendingBuffersMap.numPendingBufsAtFlush =
6292 mPendingBuffersMap.get_num_overall_buffers();
6293 LOGD("Calling flush. Wait for %d buffers to return",
6294 mPendingBuffersMap.numPendingBufsAtFlush);
6295
6296 /* send the flush event to the backend */
6297 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6298 if (rc < 0) {
6299 LOGE("Error in flush: IOCTL failure");
6300 mFlushPerf = false;
6301 pthread_mutex_unlock(&mMutex);
6302 return -ENODEV;
6303 }
6304
6305 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6306 LOGD("No pending buffers in HAL, return flush");
6307 mFlushPerf = false;
6308 pthread_mutex_unlock(&mMutex);
6309 return rc;
6310 }
6311
6312 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006313 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006314 if (rc < 0) {
6315        LOGE("Error reading the monotonic clock, cannot use timed wait");
6316 } else {
6317 timeout.tv_sec += FLUSH_TIMEOUT;
6318 timed_wait = true;
6319 }
6320
6321 //Block on conditional variable
6322 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6323 LOGD("Waiting on mBuffersCond");
6324 if (!timed_wait) {
6325 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6326 if (rc != 0) {
6327 LOGE("pthread_cond_wait failed due to rc = %s",
6328 strerror(rc));
6329 break;
6330 }
6331 } else {
6332 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6333 if (rc != 0) {
6334 LOGE("pthread_cond_timedwait failed due to rc = %s",
6335 strerror(rc));
6336 break;
6337 }
6338 }
6339 }
6340 if (rc != 0) {
6341 mFlushPerf = false;
6342 pthread_mutex_unlock(&mMutex);
6343 return -ENODEV;
6344 }
6345
6346 LOGD("Received buffers, now safe to return them");
6347
6348 //make sure the channels handle flush
6349 //currently only required for the picture channel to release snapshot resources
6350 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6351 it != mStreamInfo.end(); it++) {
6352 QCamera3Channel *channel = (*it)->channel;
6353 if (channel) {
6354 rc = channel->flush();
6355 if (rc) {
6356 LOGE("Flushing the channels failed with error %d", rc);
6357 // even though the channel flush failed we need to continue and
6358                // even though the channel flush failed, we need to continue and
6359                // return the buffers we have to the framework; however, the return
6360                // value will be an error
6361 }
6362 }
6363 }
6364
6365 /* notify the frameworks and send errored results */
6366 rc = notifyErrorForPendingRequests();
6367 if (rc < 0) {
6368 LOGE("notifyErrorForPendingRequests failed");
6369 pthread_mutex_unlock(&mMutex);
6370 return rc;
6371 }
6372
6373 //unblock process_capture_request
6374 mPendingLiveRequest = 0;
6375 unblockRequestIfNecessary();
6376
6377 mFlushPerf = false;
6378 pthread_mutex_unlock(&mMutex);
6379 LOGD ("Flush Operation complete. rc = %d", rc);
6380 return rc;
6381}
6382
6383/*===========================================================================
6384 * FUNCTION : handleCameraDeviceError
6385 *
6386 * DESCRIPTION: This function performs an internal flush, notifies the error
6387 *              to the framework and updates the state variable.
6388 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006389 * PARAMETERS :
6390 * @stopChannelImmediately : stop channels immediately without waiting for
6391 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006392 *
6393 * RETURN : NO_ERROR on Success
6394 * Error code on failure
6395 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006396int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006397{
6398 int32_t rc = NO_ERROR;
6399
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006400 {
6401 Mutex::Autolock lock(mFlushLock);
6402 pthread_mutex_lock(&mMutex);
6403 if (mState != ERROR) {
6404 //if mState != ERROR, nothing to be done
6405 pthread_mutex_unlock(&mMutex);
6406 return NO_ERROR;
6407 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006408 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006409
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006410 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006411 if (NO_ERROR != rc) {
6412 LOGE("internal flush to handle mState = ERROR failed");
6413 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006414
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006415 pthread_mutex_lock(&mMutex);
6416 mState = DEINIT;
6417 pthread_mutex_unlock(&mMutex);
6418 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006419
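    // Notify the framework of the fatal device error so that it can close the
    // device.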
6420 camera3_notify_msg_t notify_msg;
6421 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6422 notify_msg.type = CAMERA3_MSG_ERROR;
6423 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6424 notify_msg.message.error.error_stream = NULL;
6425 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006426 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006427
6428 return rc;
6429}
6430
6431/*===========================================================================
6432 * FUNCTION : captureResultCb
6433 *
6434 * DESCRIPTION: Callback handler for all capture result
6435 * (streams, as well as metadata)
6436 *
6437 * PARAMETERS :
6438 *   @metadata_buf : metadata information
6439 *   @buffer : gralloc buffer to be returned to frameworks; NULL if metadata
6440 *   @frame_number : frame number of the capture request
6441 *   @isInputBuffer : true when this callback is for the request's input buffer
6442 * RETURN : NONE
6443 *==========================================================================*/
6444void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6445 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6446{
6447 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006448 pthread_mutex_lock(&mMutex);
6449 uint8_t batchSize = mBatchSize;
6450 pthread_mutex_unlock(&mMutex);
6451 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006452 handleBatchMetadata(metadata_buf,
6453 true /* free_and_bufdone_meta_buf */);
6454 } else { /* mBatchSize = 0 */
6455 hdrPlusPerfLock(metadata_buf);
6456 pthread_mutex_lock(&mMutex);
6457 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006458 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006459 true /* last urgent frame of batch metadata */,
6460 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006461 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006462 pthread_mutex_unlock(&mMutex);
6463 }
6464 } else if (isInputBuffer) {
6465 pthread_mutex_lock(&mMutex);
6466 handleInputBufferWithLock(frame_number);
6467 pthread_mutex_unlock(&mMutex);
6468 } else {
6469 pthread_mutex_lock(&mMutex);
6470 handleBufferWithLock(buffer, frame_number);
6471 pthread_mutex_unlock(&mMutex);
6472 }
6473 return;
6474}
6475
6476/*===========================================================================
6477 * FUNCTION : getReprocessibleOutputStreamId
6478 *
6479 * DESCRIPTION: Get source output stream id for the input reprocess stream
6480 * based on size and format, which would be the largest
6481 * output stream if an input stream exists.
6482 *
6483 * PARAMETERS :
6484 * @id : return the stream id if found
6485 *
6486 * RETURN : int32_t type of status
6487 * NO_ERROR -- success
6488 *              non-zero failure code
6489 *==========================================================================*/
6490int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6491{
6492    /* look for an output or bidirectional stream with the same size and format
6493       as the input stream and return that stream */
6494 if ((mInputStreamInfo.dim.width > 0) &&
6495 (mInputStreamInfo.dim.height > 0)) {
6496 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6497 it != mStreamInfo.end(); it++) {
6498
6499 camera3_stream_t *stream = (*it)->stream;
6500 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6501 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6502 (stream->format == mInputStreamInfo.format)) {
6503 // Usage flag for an input stream and the source output stream
6504 // may be different.
6505 LOGD("Found reprocessible output stream! %p", *it);
6506 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6507 stream->usage, mInputStreamInfo.usage);
6508
6509 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6510 if (channel != NULL && channel->mStreams[0]) {
6511 id = channel->mStreams[0]->getMyServerID();
6512 return NO_ERROR;
6513 }
6514 }
6515 }
6516 } else {
6517 LOGD("No input stream, so no reprocessible output stream");
6518 }
6519 return NAME_NOT_FOUND;
6520}
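/* Illustrative sketch (not compiled): the matching rule used above is that an output
 * stream becomes the reprocess source when its width, height and format all equal
 * those of the configured input stream, and the first match wins. The struct and
 * function names below are simplified stand-ins, not the HAL's own types.
 */
#if 0
struct SimpleStream {
    uint32_t width;
    uint32_t height;
    int      format;
    uint32_t serverId;
};

static bool findReprocessSource(const SimpleStream *outputs, size_t count,
        const SimpleStream &input, uint32_t &id)
{
    for (size_t i = 0; i < count; i++) {
        if (outputs[i].width == input.width &&
                outputs[i].height == input.height &&
                outputs[i].format == input.format) {
            id = outputs[i].serverId;  // first matching output, mirroring the list walk above
            return true;
        }
    }
    return false;
}
#endif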
6521
6522/*===========================================================================
6523 * FUNCTION : lookupFwkName
6524 *
6525 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6526 * make sure the parameter is correctly propagated
6527 *
6528 * PARAMETERS :
6529 * @arr : map between the two enums
6530 * @len : len of the map
6531 * @hal_name : name of the hal_parm to map
6532 *
6533 * RETURN : int type of status
6534 * fwk_name -- success
6535 * non-zero failure code
6536 *==========================================================================*/
6537template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6538 size_t len, halType hal_name)
6539{
6540
6541 for (size_t i = 0; i < len; i++) {
6542 if (arr[i].hal_name == hal_name) {
6543 return arr[i].fwk_name;
6544 }
6545 }
6546
6547 /* Failing to find a matching framework type is not necessarily
6548 * an error case. This happens when mm-camera supports more attributes
6549 * than the framework does */
6550 LOGH("Cannot find matching framework type");
6551 return NAME_NOT_FOUND;
6552}
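/* Illustrative usage sketch (not compiled): lookupFwkName() and lookupHalName() walk a
 * small table of {hal_name, fwk_name} pairs in opposite directions. The map type and
 * table below are hypothetical stand-ins for the QCameraMap-style tables used
 * elsewhere in this file; the values are placeholders, not real enum constants.
 */
#if 0
struct ExampleMap { int hal_name; int fwk_name; };
static const ExampleMap EXAMPLE_MODES_MAP[] = {
    { /* hypothetical CAM_..._OFF */ 0, /* hypothetical ANDROID_..._OFF */ 0 },
    { /* hypothetical CAM_..._ON  */ 1, /* hypothetical ANDROID_..._ON  */ 1 },
};

static void exampleRoundTrip()
{
    // HAL enum -> framework enum
    int fwk = lookupFwkName(EXAMPLE_MODES_MAP,
            sizeof(EXAMPLE_MODES_MAP) / sizeof(EXAMPLE_MODES_MAP[0]), /* hal_name */ 1);
    if (fwk != NAME_NOT_FOUND) {
        // Framework enum -> HAL enum (the reverse lookup defined further below)
        int hal = lookupHalName(EXAMPLE_MODES_MAP,
                sizeof(EXAMPLE_MODES_MAP) / sizeof(EXAMPLE_MODES_MAP[0]), fwk);
        (void)hal;
    }
}
#endif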
6553
6554/*===========================================================================
6555 * FUNCTION : lookupHalName
6556 *
6557 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6558 * make sure the parameter is correctly propagated
6559 *
6560 * PARAMETERS :
6561 * @arr : map between the two enums
6562 * @len : len of the map
6563 * @fwk_name : name of the framework parameter to map
6564 *
6565 * RETURN : int32_t type of status
6566 * hal_name -- success
6567 * non-zero failure code
6568 *==========================================================================*/
6569template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6570 size_t len, fwkType fwk_name)
6571{
6572 for (size_t i = 0; i < len; i++) {
6573 if (arr[i].fwk_name == fwk_name) {
6574 return arr[i].hal_name;
6575 }
6576 }
6577
6578 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6579 return NAME_NOT_FOUND;
6580}
6581
6582/*===========================================================================
6583 * FUNCTION : lookupProp
6584 *
6585 * DESCRIPTION: lookup a value by its name
6586 *
6587 * PARAMETERS :
6588 * @arr : map between the two enums
6589 * @len : size of the map
6590 * @name : name to be looked up
6591 *
6592 * RETURN : Value if found
6593 * CAM_CDS_MODE_MAX if not found
6594 *==========================================================================*/
6595template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6596 size_t len, const char *name)
6597{
6598 if (name) {
6599 for (size_t i = 0; i < len; i++) {
6600 if (!strcmp(arr[i].desc, name)) {
6601 return arr[i].val;
6602 }
6603 }
6604 }
6605 return CAM_CDS_MODE_MAX;
6606}
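/* Illustrative usage sketch (not compiled): lookupProp() maps a property string to a
 * CDS enum value by string comparison, returning CAM_CDS_MODE_MAX when nothing
 * matches. The table, helper name and the enum literals CAM_CDS_MODE_OFF/ON/AUTO are
 * assumptions made for illustration (based on the cam_cds_mode_type_t type used
 * above), not a definitive list.
 */
#if 0
struct ExamplePropMap { const char *desc; cam_cds_mode_type_t val; };
static const ExamplePropMap EXAMPLE_CDS_MAP[] = {
    { "Off",  CAM_CDS_MODE_OFF  },
    { "On",   CAM_CDS_MODE_ON   },
    { "Auto", CAM_CDS_MODE_AUTO },
};

static cam_cds_mode_type_t exampleCdsFromProp(const char *propValue /* e.g. "Auto" */)
{
    return lookupProp(EXAMPLE_CDS_MAP,
            sizeof(EXAMPLE_CDS_MAP) / sizeof(EXAMPLE_CDS_MAP[0]), propValue);
}
#endif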
6607
6608/*===========================================================================
6609 * FUNCTION : translateFromHalMetadata
6610 * DESCRIPTION: Translate the HAL metadata buffer into a framework camera_metadata_t result
6611 *
6612 * PARAMETERS :
6613 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006614 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006615 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006616 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6617 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006618 *
6619 * RETURN : camera_metadata_t*
6620 * metadata in a format specified by fwk
6621 *==========================================================================*/
6622camera_metadata_t*
6623QCamera3HardwareInterface::translateFromHalMetadata(
6624 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006625 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006626 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006627 bool lastMetadataInBatch,
6628 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006629{
6630 CameraMetadata camMetadata;
6631 camera_metadata_t *resultMetadata;
6632
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006633 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006634 /* In batch mode, if this is not the last metadata in the batch, populate only
6635 * SENSOR_TIMESTAMP; the timestamp is needed for the shutter notify calculation.
6636 */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006637 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006638 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006639 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006640 }
6641
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006642 if (pendingRequest.jpegMetadata.entryCount())
6643 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006644
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006645 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6646 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6647 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6648 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6649 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006650 if (mBatchSize == 0) {
6651 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006652 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006653 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006654
Samuel Ha68ba5172016-12-15 18:41:12 -08006655 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6656 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006657 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006658 // DevCamDebug metadata translateFromHalMetadata AF
6659 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6660 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6661 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6662 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6663 }
6664 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6665 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6666 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6667 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6670 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6671 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6672 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6673 }
6674 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6675 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6676 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6677 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6678 }
6679 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6680 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6681 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6682 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6683 }
6684 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6685 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6686 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6687 *DevCamDebug_af_monitor_pdaf_target_pos;
6688 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6689 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6690 }
6691 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6692 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6693 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6694 *DevCamDebug_af_monitor_pdaf_confidence;
6695 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6696 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6697 }
6698 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6699 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6700 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6701 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6702 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6703 }
6704 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6705 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6706 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6707 *DevCamDebug_af_monitor_tof_target_pos;
6708 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6709 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6712 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6713 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6714 *DevCamDebug_af_monitor_tof_confidence;
6715 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6716 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6717 }
6718 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6719 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6720 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6721 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6722 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6723 }
6724 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6725 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6726 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6727 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6728 &fwk_DevCamDebug_af_monitor_type_select, 1);
6729 }
6730 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6731 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6732 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6733 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6734 &fwk_DevCamDebug_af_monitor_refocus, 1);
6735 }
6736 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6737 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6738 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6739 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6740 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6741 }
6742 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6743 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6744 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6745 *DevCamDebug_af_search_pdaf_target_pos;
6746 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6747 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6750 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6751 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6752 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6753 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6754 }
6755 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6756 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6757 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6758 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6759 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6760 }
6761 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6762 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6763 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6764 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6765 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6766 }
6767 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6768 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6769 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6770 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6771 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6772 }
6773 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6774 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6775 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6776 *DevCamDebug_af_search_tof_target_pos;
6777 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6778 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6779 }
6780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6781 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6782 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6783 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6784 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6785 }
6786 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6787 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6788 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6789 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6790 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6791 }
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6793 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6794 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6795 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6796 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6797 }
6798 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6799 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6800 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6801 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6802 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6805 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6806 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6807 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6808 &fwk_DevCamDebug_af_search_type_select, 1);
6809 }
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6811 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6812 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6813 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6814 &fwk_DevCamDebug_af_search_next_pos, 1);
6815 }
6816 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6817 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6818 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6819 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6820 &fwk_DevCamDebug_af_search_target_pos, 1);
6821 }
6822 // DevCamDebug metadata translateFromHalMetadata AEC
6823 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6824 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6825 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6826 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6827 }
6828 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6829 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6830 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6831 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6832 }
6833 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6834 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6835 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6836 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6837 }
6838 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6839 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6840 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6841 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6842 }
6843 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6844 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6845 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6846 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6847 }
6848 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6849 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6850 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6851 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6852 }
6853 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6854 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6855 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6856 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6857 }
6858 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6859 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6860 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6861 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6862 }
Samuel Ha34229982017-02-17 13:51:11 -08006863 // DevCamDebug metadata translateFromHalMetadata zzHDR
6864 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6865 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6866 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6867 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6868 }
6869 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6870 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006871 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006872 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6873 }
6874 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6875 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6876 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6877 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6878 }
6879 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6880 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006881 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006882 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6883 }
6884 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6885 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6886 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6887 *DevCamDebug_aec_hdr_sensitivity_ratio;
6888 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6889 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6890 }
6891 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6892 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6893 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6894 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6895 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6896 }
6897 // DevCamDebug metadata translateFromHalMetadata ADRC
6898 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6899 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6900 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6901 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6902 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6903 }
6904 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6905 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6906 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6907 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6908 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6909 }
6910 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6911 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6912 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6913 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6914 }
6915 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6916 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6917 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6918 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6919 }
6920 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6921 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6922 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6923 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6924 }
6925 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6926 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6927 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6928 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6929 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006930 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6931 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6932 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6933 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6934 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6935 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6936 }
6937 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6938 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6939 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6940 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6941 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6942 }
6943 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6944 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6945 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6946 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6947 &fwk_DevCamDebug_aec_subject_motion, 1);
6948 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006949 // DevCamDebug metadata translateFromHalMetadata AWB
6950 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6951 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6952 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6953 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6954 }
6955 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6956 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6957 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6958 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6959 }
6960 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6961 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6962 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6963 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6964 }
6965 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6966 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6967 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6968 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6969 }
6970 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6971 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6972 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6973 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6974 }
6975 }
6976 // atrace_end(ATRACE_TAG_ALWAYS);
6977
Thierry Strudel3d639192016-09-09 11:52:26 -07006978 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6979 int64_t fwk_frame_number = *frame_number;
6980 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6981 }
6982
6983 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6984 int32_t fps_range[2];
6985 fps_range[0] = (int32_t)float_range->min_fps;
6986 fps_range[1] = (int32_t)float_range->max_fps;
6987 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6988 fps_range, 2);
6989 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6990 fps_range[0], fps_range[1]);
6991 }
6992
6993 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6994 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6995 }
6996
6997 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6998 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6999 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7000 *sceneMode);
7001 if (NAME_NOT_FOUND != val) {
7002 uint8_t fwkSceneMode = (uint8_t)val;
7003 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7004 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7005 fwkSceneMode);
7006 }
7007 }
7008
7009 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7010 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7011 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7012 }
7013
7014 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7015 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7016 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7017 }
7018
7019 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7020 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7021 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7022 }
7023
7024 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7025 CAM_INTF_META_EDGE_MODE, metadata) {
7026 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7027 }
7028
7029 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7030 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7031 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7032 }
7033
7034 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7035 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7036 }
7037
7038 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7039 if (0 <= *flashState) {
7040 uint8_t fwk_flashState = (uint8_t) *flashState;
7041 if (!gCamCapability[mCameraId]->flash_available) {
7042 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7043 }
7044 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7045 }
7046 }
7047
7048 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7049 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7050 if (NAME_NOT_FOUND != val) {
7051 uint8_t fwk_flashMode = (uint8_t)val;
7052 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7053 }
7054 }
7055
7056 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7057 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7058 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7059 }
7060
7061 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7062 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7063 }
7064
7065 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7066 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7067 }
7068
7069 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7070 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7071 }
7072
7073 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7074 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7075 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7076 }
7077
7078 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7079 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7080 LOGD("fwk_videoStab = %d", fwk_videoStab);
7081 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7082 } else {
7083 // Regardless of whether video stabilization is supported, CTS expects the EIS result to be non-NULL,
7084 // so hardcode the video stabilization result to OFF mode.
7085 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7086 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007087 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007088 }
7089
7090 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7091 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7092 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7093 }
7094
7095 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7096 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7097 }
7098
Thierry Strudel3d639192016-09-09 11:52:26 -07007099 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7100 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007101 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007102
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007103 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7104 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007105
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007106 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007107 blackLevelAppliedPattern->cam_black_level[0],
7108 blackLevelAppliedPattern->cam_black_level[1],
7109 blackLevelAppliedPattern->cam_black_level[2],
7110 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007111 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7112 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007113
7114#ifndef USE_HAL_3_3
7115 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307116 // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007117 // depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307118 fwk_blackLevelInd[0] /= 16.0;
7119 fwk_blackLevelInd[1] /= 16.0;
7120 fwk_blackLevelInd[2] /= 16.0;
7121 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007122 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7123 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007124#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007125 }
7126
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007127#ifndef USE_HAL_3_3
7128 // Fixed whitelevel is used by ISP/Sensor
7129 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7130 &gCamCapability[mCameraId]->white_level, 1);
7131#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007132
7133 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7134 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7135 int32_t scalerCropRegion[4];
7136 scalerCropRegion[0] = hScalerCropRegion->left;
7137 scalerCropRegion[1] = hScalerCropRegion->top;
7138 scalerCropRegion[2] = hScalerCropRegion->width;
7139 scalerCropRegion[3] = hScalerCropRegion->height;
7140
7141 // Adjust crop region from sensor output coordinate system to active
7142 // array coordinate system.
7143 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7144 scalerCropRegion[2], scalerCropRegion[3]);
7145
7146 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7147 }
7148
7149 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7150 LOGD("sensorExpTime = %lld", *sensorExpTime);
7151 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7152 }
7153
7154 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7155 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7156 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7157 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7158 }
7159
7160 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7161 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7162 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7163 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7164 sensorRollingShutterSkew, 1);
7165 }
7166
7167 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7168 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7169 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7170
7171 //calculate the noise profile based on sensitivity
7172 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7173 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7174 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7175 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7176 noise_profile[i] = noise_profile_S;
7177 noise_profile[i+1] = noise_profile_O;
7178 }
7179 LOGD("noise model entry (S, O) is (%f, %f)",
7180 noise_profile_S, noise_profile_O);
7181 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7182 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7183 }
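/* Note on the block above: ANDROID_SENSOR_NOISE_PROFILE is reported as one (S, O)
 * pair per color channel, where (per the Android metadata definition) S scales the
 * signal-dependent (shot) noise and O is the signal-independent (read) noise floor;
 * both coefficients are derived here from the analog sensitivity via
 * computeNoiseModelEntryS()/computeNoiseModelEntryO().
 */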
7184
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007185#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007186 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007187 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007188 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007189 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007190 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7191 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7192 }
7193 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007194#endif
7195
Thierry Strudel3d639192016-09-09 11:52:26 -07007196 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7197 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7198 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7199 }
7200
7201 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7202 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7203 *faceDetectMode);
7204 if (NAME_NOT_FOUND != val) {
7205 uint8_t fwk_faceDetectMode = (uint8_t)val;
7206 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7207
7208 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7209 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7210 CAM_INTF_META_FACE_DETECTION, metadata) {
7211 uint8_t numFaces = MIN(
7212 faceDetectionInfo->num_faces_detected, MAX_ROI);
7213 int32_t faceIds[MAX_ROI];
7214 uint8_t faceScores[MAX_ROI];
7215 int32_t faceRectangles[MAX_ROI * 4];
7216 int32_t faceLandmarks[MAX_ROI * 6];
7217 size_t j = 0, k = 0;
7218
7219 for (size_t i = 0; i < numFaces; i++) {
7220 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7221 // Adjust crop region from sensor output coordinate system to active
7222 // array coordinate system.
7223 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7224 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7225 rect.width, rect.height);
7226
7227 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7228 faceRectangles+j, -1);
7229
Jason Lee8ce36fa2017-04-19 19:40:37 -07007230 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7231 "bottom-right (%d, %d)",
7232 faceDetectionInfo->frame_id, i,
7233 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7234 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7235
Thierry Strudel3d639192016-09-09 11:52:26 -07007236 j+= 4;
7237 }
7238 if (numFaces <= 0) {
7239 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7240 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7241 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7242 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7243 }
7244
7245 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7246 numFaces);
7247 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7248 faceRectangles, numFaces * 4U);
7249 if (fwk_faceDetectMode ==
7250 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7251 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7252 CAM_INTF_META_FACE_LANDMARK, metadata) {
7253
7254 for (size_t i = 0; i < numFaces; i++) {
7255 // Map the co-ordinate sensor output coordinate system to active
7256 // array coordinate system.
7257 mCropRegionMapper.toActiveArray(
7258 landmarks->face_landmarks[i].left_eye_center.x,
7259 landmarks->face_landmarks[i].left_eye_center.y);
7260 mCropRegionMapper.toActiveArray(
7261 landmarks->face_landmarks[i].right_eye_center.x,
7262 landmarks->face_landmarks[i].right_eye_center.y);
7263 mCropRegionMapper.toActiveArray(
7264 landmarks->face_landmarks[i].mouth_center.x,
7265 landmarks->face_landmarks[i].mouth_center.y);
7266
7267 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007268
7269 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7270 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7271 faceDetectionInfo->frame_id, i,
7272 faceLandmarks[k + LEFT_EYE_X],
7273 faceLandmarks[k + LEFT_EYE_Y],
7274 faceLandmarks[k + RIGHT_EYE_X],
7275 faceLandmarks[k + RIGHT_EYE_Y],
7276 faceLandmarks[k + MOUTH_X],
7277 faceLandmarks[k + MOUTH_Y]);
7278
Thierry Strudel04e026f2016-10-10 11:27:36 -07007279 k+= TOTAL_LANDMARK_INDICES;
7280 }
7281 } else {
7282 for (size_t i = 0; i < numFaces; i++) {
7283 setInvalidLandmarks(faceLandmarks+k);
7284 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007285 }
7286 }
7287
Jason Lee49619db2017-04-13 12:07:22 -07007288 for (size_t i = 0; i < numFaces; i++) {
7289 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7290
7291 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7292 faceDetectionInfo->frame_id, i, faceIds[i]);
7293 }
7294
Thierry Strudel3d639192016-09-09 11:52:26 -07007295 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7296 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7297 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007298 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007299 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7300 CAM_INTF_META_FACE_BLINK, metadata) {
7301 uint8_t detected[MAX_ROI];
7302 uint8_t degree[MAX_ROI * 2];
7303 for (size_t i = 0; i < numFaces; i++) {
7304 detected[i] = blinks->blink[i].blink_detected;
7305 degree[2 * i] = blinks->blink[i].left_blink;
7306 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007307
Jason Lee49619db2017-04-13 12:07:22 -07007308 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7309 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7310 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7311 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007312 }
7313 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7314 detected, numFaces);
7315 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7316 degree, numFaces * 2);
7317 }
7318 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7319 CAM_INTF_META_FACE_SMILE, metadata) {
7320 uint8_t degree[MAX_ROI];
7321 uint8_t confidence[MAX_ROI];
7322 for (size_t i = 0; i < numFaces; i++) {
7323 degree[i] = smiles->smile[i].smile_degree;
7324 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007325
Jason Lee49619db2017-04-13 12:07:22 -07007326 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7327 "smile_degree=%d, smile_score=%d",
7328 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007329 }
7330 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7331 degree, numFaces);
7332 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7333 confidence, numFaces);
7334 }
7335 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7336 CAM_INTF_META_FACE_GAZE, metadata) {
7337 int8_t angle[MAX_ROI];
7338 int32_t direction[MAX_ROI * 3];
7339 int8_t degree[MAX_ROI * 2];
7340 for (size_t i = 0; i < numFaces; i++) {
7341 angle[i] = gazes->gaze[i].gaze_angle;
7342 direction[3 * i] = gazes->gaze[i].updown_dir;
7343 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7344 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7345 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7346 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007347
7348 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7349 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7350 "left_right_gaze=%d, top_bottom_gaze=%d",
7351 faceDetectionInfo->frame_id, i, angle[i],
7352 direction[3 * i], direction[3 * i + 1],
7353 direction[3 * i + 2],
7354 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007355 }
7356 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7357 (uint8_t *)angle, numFaces);
7358 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7359 direction, numFaces * 3);
7360 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7361 (uint8_t *)degree, numFaces * 2);
7362 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007363 }
7364 }
7365 }
7366 }
7367
7368 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7369 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007370 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007371 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007372 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007373
Shuzhen Wang14415f52016-11-16 18:26:18 -08007374 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7375 histogramBins = *histBins;
7376 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7377 }
7378
7379 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007380 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7381 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007382 int32_t* histogramData = NULL;
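                // Pick the histogram buffer for the Bayer channel the stats were
                // generated on; CAM_STATS_CHANNEL_Y/ALL/R and any unknown type fall
                // through to the R-channel buffer, while YUV-type stats use the
                // single luma histogram.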
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007383
7384 switch (stats_data->type) {
7385 case CAM_HISTOGRAM_TYPE_BAYER:
7386 switch (stats_data->bayer_stats.data_type) {
7387 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007388 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7389 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007390 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007391 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7392 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007393 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007394 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7395 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007396 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007397 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007398 case CAM_STATS_CHANNEL_R:
7399 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007400 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7401 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007402 }
7403 break;
7404 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007405 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007406 break;
7407 }
7408
Shuzhen Wang14415f52016-11-16 18:26:18 -08007409 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007410 }
7411 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007412 }
7413
7414 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7415 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7416 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7417 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7418 }
7419
7420 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7421 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7422 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7423 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7424 }
7425
7426 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7427 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7428 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7429 CAM_MAX_SHADING_MAP_HEIGHT);
7430 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7431 CAM_MAX_SHADING_MAP_WIDTH);
7432 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7433 lensShadingMap->lens_shading, 4U * map_width * map_height);
7434 }
7435
7436 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7437 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7438 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7439 }
7440
7441 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7442 //Populate CAM_INTF_META_TONEMAP_CURVES
7443 /* ch0 = G, ch1 = B, ch2 = R */
7444 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7445 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7446 tonemap->tonemap_points_cnt,
7447 CAM_MAX_TONEMAP_CURVE_SIZE);
7448 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7449 }
7450
7451 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7452 &tonemap->curves[0].tonemap_points[0][0],
7453 tonemap->tonemap_points_cnt * 2);
7454
7455 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7456 &tonemap->curves[1].tonemap_points[0][0],
7457 tonemap->tonemap_points_cnt * 2);
7458
7459 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7460 &tonemap->curves[2].tonemap_points[0][0],
7461 tonemap->tonemap_points_cnt * 2);
7462 }
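/* Note on the block above: each tonemap curve is serialized as (Pin, Pout) pairs,
 * which is why tonemap_points_cnt * 2 floats are published per channel, and the
 * backend's channel order (ch0 = G, ch1 = B, ch2 = R) is mapped onto the
 * GREEN/BLUE/RED framework tags accordingly.
 */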
7463
7464 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7465 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7466 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7467 CC_GAIN_MAX);
7468 }
7469
7470 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7471 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7472 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7473 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7474 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7475 }
7476
7477 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7478 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7479 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7480 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7481 toneCurve->tonemap_points_cnt,
7482 CAM_MAX_TONEMAP_CURVE_SIZE);
7483 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7484 }
7485 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7486 (float*)toneCurve->curve.tonemap_points,
7487 toneCurve->tonemap_points_cnt * 2);
7488 }
7489
7490 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7491 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7492 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7493 predColorCorrectionGains->gains, 4);
7494 }
7495
7496 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7497 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7498 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7499 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7500 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7501 }
7502
7503 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7504 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7505 }
7506
7507 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7508 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7509 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7510 }
7511
7512 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7513 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7514 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7515 }
7516
7517 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7518 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7519 *effectMode);
7520 if (NAME_NOT_FOUND != val) {
7521 uint8_t fwk_effectMode = (uint8_t)val;
7522 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7523 }
7524 }
7525
7526 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7527 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7528 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7529 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7530 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7531 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7532 }
7533 int32_t fwk_testPatternData[4];
7534 fwk_testPatternData[0] = testPatternData->r;
7535 fwk_testPatternData[3] = testPatternData->b;
7536 switch (gCamCapability[mCameraId]->color_arrangement) {
7537 case CAM_FILTER_ARRANGEMENT_RGGB:
7538 case CAM_FILTER_ARRANGEMENT_GRBG:
7539 fwk_testPatternData[1] = testPatternData->gr;
7540 fwk_testPatternData[2] = testPatternData->gb;
7541 break;
7542 case CAM_FILTER_ARRANGEMENT_GBRG:
7543 case CAM_FILTER_ARRANGEMENT_BGGR:
7544 fwk_testPatternData[2] = testPatternData->gr;
7545 fwk_testPatternData[1] = testPatternData->gb;
7546 break;
7547 default:
7548 LOGE("color arrangement %d is not supported",
7549 gCamCapability[mCameraId]->color_arrangement);
7550 break;
7551 }
7552 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7553 }
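/* Note on the block above: ANDROID_SENSOR_TEST_PATTERN_DATA is expected in
 * [R, Geven, Godd, B] order (per the framework's definition of the tag), so which of
 * the backend's gr/gb values lands in slot 1 versus slot 2 depends on whether the
 * green-on-red rows are the even or the odd rows for this sensor's color filter
 * arrangement.
 */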
7554
7555 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7556 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7557 }
7558
7559 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7560 String8 str((const char *)gps_methods);
7561 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7562 }
7563
7564 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7565 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7566 }
7567
7568 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7569 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7570 }
7571
7572 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7573 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7574 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7575 }
7576
7577 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7578 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7579 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7580 }
7581
7582 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7583 int32_t fwk_thumb_size[2];
7584 fwk_thumb_size[0] = thumb_size->width;
7585 fwk_thumb_size[1] = thumb_size->height;
7586 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7587 }
7588
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007589 // Skip reprocess metadata if there is no input stream.
7590 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7591 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7592 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7593 privateData,
7594 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7595 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007596 }
7597
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007598 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007599 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007600 meteringMode, 1);
7601 }
7602
Thierry Strudel54dc9782017-02-15 12:12:10 -08007603 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7604 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7605 LOGD("hdr_scene_data: %d %f\n",
7606 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7607 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7608 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7609 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7610 &isHdr, 1);
7611 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7612 &isHdrConfidence, 1);
7613 }
7614
7615
7616
Thierry Strudel3d639192016-09-09 11:52:26 -07007617 if (metadata->is_tuning_params_valid) {
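        /* The tuning blob below is packed as a flat byte stream:
         * [version][sensor_size][vfe_size][cpp_size][cac_size][mod3_size]
         * followed by the sensor, VFE, CPP and CAC payloads (each clamped to its
         * TUNING_*_DATA_MAX), and is published as QCAMERA3_TUNING_META_DATA_BLOB.
         */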
7618 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7619 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7620 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7621
7622
7623 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7624 sizeof(uint32_t));
7625 data += sizeof(uint32_t);
7626
7627 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7628 sizeof(uint32_t));
7629 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7630 data += sizeof(uint32_t);
7631
7632 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7633 sizeof(uint32_t));
7634 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7635 data += sizeof(uint32_t);
7636
7637 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7638 sizeof(uint32_t));
7639 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7640 data += sizeof(uint32_t);
7641
7642 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7643 sizeof(uint32_t));
7644 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7645 data += sizeof(uint32_t);
7646
7647 metadata->tuning_params.tuning_mod3_data_size = 0;
7648 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7649 sizeof(uint32_t));
7650 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7651 data += sizeof(uint32_t);
7652
7653 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7654 TUNING_SENSOR_DATA_MAX);
7655 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7656 count);
7657 data += count;
7658
7659 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7660 TUNING_VFE_DATA_MAX);
7661 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7662 count);
7663 data += count;
7664
7665 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7666 TUNING_CPP_DATA_MAX);
7667 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7668 count);
7669 data += count;
7670
7671 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7672 TUNING_CAC_DATA_MAX);
7673 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7674 count);
7675 data += count;
7676
7677 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7678 (int32_t *)(void *)tuning_meta_data_blob,
7679 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7680 }
7681
7682 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7683 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7684 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7685 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7686 NEUTRAL_COL_POINTS);
7687 }
7688
7689 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7690 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7691 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7692 }
7693
7694 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7695 int32_t aeRegions[REGIONS_TUPLE_COUNT];
 7696        // Adjust the AE region from the sensor output coordinate system to the active
7697 // array coordinate system.
7698 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7699 hAeRegions->rect.width, hAeRegions->rect.height);
7700
7701 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7702 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7703 REGIONS_TUPLE_COUNT);
7704 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7705 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7706 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7707 hAeRegions->rect.height);
7708 }
7709
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007710 if (!pendingRequest.focusStateSent) {
7711 if (pendingRequest.focusStateValid) {
7712 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7713 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007714 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007715 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7716 uint8_t fwk_afState = (uint8_t) *afState;
7717 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7718 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7719 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007720 }
7721 }
7722
Thierry Strudel3d639192016-09-09 11:52:26 -07007723 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7724 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7725 }
7726
7727 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7728 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7729 }
7730
7731 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7732 uint8_t fwk_lensState = *lensState;
7733 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7734 }
7735
Thierry Strudel3d639192016-09-09 11:52:26 -07007736 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007737 uint32_t ab_mode = *hal_ab_mode;
7738 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7739 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7740 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7741 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007742 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007743 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007744 if (NAME_NOT_FOUND != val) {
7745 uint8_t fwk_ab_mode = (uint8_t)val;
7746 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7747 }
7748 }
7749
7750 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7751 int val = lookupFwkName(SCENE_MODES_MAP,
7752 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7753 if (NAME_NOT_FOUND != val) {
7754 uint8_t fwkBestshotMode = (uint8_t)val;
7755 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7756 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7757 } else {
7758 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7759 }
7760 }
7761
7762 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7763 uint8_t fwk_mode = (uint8_t) *mode;
7764 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7765 }
7766
 7767    /* Constant metadata values to be updated */
7768 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7769 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7770
7771 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7772 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7773
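    // Publish an empty hot pixel map (count 0); no individual hot pixel
    // coordinates are reported by this HAL.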
7774 int32_t hotPixelMap[2];
7775 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7776
7777 // CDS
7778 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7779 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7780 }
7781
Thierry Strudel04e026f2016-10-10 11:27:36 -07007782 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7783 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007784 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007785 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7786 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7787 } else {
7788 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7789 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007790
7791 if(fwk_hdr != curr_hdr_state) {
7792 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7793 if(fwk_hdr)
7794 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7795 else
7796 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7797 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007798 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7799 }
7800
Thierry Strudel54dc9782017-02-15 12:12:10 -08007801 //binning correction
7802 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7803 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7804 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7805 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7806 }
7807
Thierry Strudel04e026f2016-10-10 11:27:36 -07007808 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007809 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007810 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7811 int8_t is_ir_on = 0;
7812
7813 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7814 if(is_ir_on != curr_ir_state) {
7815 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7816 if(is_ir_on)
7817 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7818 else
7819 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7820 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007821 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007822 }
7823
Thierry Strudel269c81a2016-10-12 12:13:59 -07007824 // AEC SPEED
7825 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7826 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7827 }
7828
7829 // AWB SPEED
7830 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7831 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7832 }
7833
Thierry Strudel3d639192016-09-09 11:52:26 -07007834 // TNR
7835 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7836 uint8_t tnr_enable = tnr->denoise_enable;
7837 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007838 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7839 int8_t is_tnr_on = 0;
7840
7841 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7842 if(is_tnr_on != curr_tnr_state) {
7843 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7844 if(is_tnr_on)
7845 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7846 else
7847 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007849
7850 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7851 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7852 }
7853
7854 // Reprocess crop data
7855 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7856 uint8_t cnt = crop_data->num_of_streams;
7857 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
 7858            // mm-qcamera-daemon only posts crop_data for streams
 7859            // not linked to pproc, so the absence of valid crop
 7860            // metadata is not necessarily an error.
7861 LOGD("No valid crop metadata entries");
7862 } else {
7863 uint32_t reproc_stream_id;
7864 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7865 LOGD("No reprocessible stream found, ignore crop data");
7866 } else {
7867 int rc = NO_ERROR;
7868 Vector<int32_t> roi_map;
7869 int32_t *crop = new int32_t[cnt*4];
7870 if (NULL == crop) {
7871 rc = NO_MEMORY;
7872 }
7873 if (NO_ERROR == rc) {
7874 int32_t streams_found = 0;
7875 for (size_t i = 0; i < cnt; i++) {
7876 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7877 if (pprocDone) {
7878 // HAL already does internal reprocessing,
7879 // either via reprocessing before JPEG encoding,
7880 // or offline postprocessing for pproc bypass case.
7881 crop[0] = 0;
7882 crop[1] = 0;
7883 crop[2] = mInputStreamInfo.dim.width;
7884 crop[3] = mInputStreamInfo.dim.height;
7885 } else {
7886 crop[0] = crop_data->crop_info[i].crop.left;
7887 crop[1] = crop_data->crop_info[i].crop.top;
7888 crop[2] = crop_data->crop_info[i].crop.width;
7889 crop[3] = crop_data->crop_info[i].crop.height;
7890 }
7891 roi_map.add(crop_data->crop_info[i].roi_map.left);
7892 roi_map.add(crop_data->crop_info[i].roi_map.top);
7893 roi_map.add(crop_data->crop_info[i].roi_map.width);
7894 roi_map.add(crop_data->crop_info[i].roi_map.height);
7895 streams_found++;
7896 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7897 crop[0], crop[1], crop[2], crop[3]);
7898 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7899 crop_data->crop_info[i].roi_map.left,
7900 crop_data->crop_info[i].roi_map.top,
7901 crop_data->crop_info[i].roi_map.width,
7902 crop_data->crop_info[i].roi_map.height);
7903 break;
7904
7905 }
7906 }
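                // Publish the reprocess crop as [left, top, width, height] for
                // each matched stream, along with the corresponding ROI map.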
7907 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7908 &streams_found, 1);
7909 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7910 crop, (size_t)(streams_found * 4));
7911 if (roi_map.array()) {
7912 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7913 roi_map.array(), roi_map.size());
7914 }
7915 }
7916 if (crop) {
7917 delete [] crop;
7918 }
7919 }
7920 }
7921 }
7922
7923 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
 7924        // Regardless of whether CAC is supported, CTS expects the CAC result to be
 7925        // non-NULL, so hardcode the CAC result to OFF mode.
7926 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7927 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7928 } else {
7929 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7930 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7931 *cacMode);
7932 if (NAME_NOT_FOUND != val) {
7933 uint8_t resultCacMode = (uint8_t)val;
 7934                // Check whether the CAC result from the CB equals the framework-set CAC mode.
 7935                // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007936 if (pendingRequest.fwkCacMode != resultCacMode) {
7937 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007938 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007939 //Check if CAC is disabled by property
7940 if (m_cacModeDisabled) {
7941 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7942 }
7943
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007944 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007945 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7946 } else {
7947 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7948 }
7949 }
7950 }
7951
7952 // Post blob of cam_cds_data through vendor tag.
7953 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7954 uint8_t cnt = cdsInfo->num_of_streams;
7955 cam_cds_data_t cdsDataOverride;
7956 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7957 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7958 cdsDataOverride.num_of_streams = 1;
7959 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7960 uint32_t reproc_stream_id;
7961 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7962 LOGD("No reprocessible stream found, ignore cds data");
7963 } else {
7964 for (size_t i = 0; i < cnt; i++) {
7965 if (cdsInfo->cds_info[i].stream_id ==
7966 reproc_stream_id) {
7967 cdsDataOverride.cds_info[0].cds_enable =
7968 cdsInfo->cds_info[i].cds_enable;
7969 break;
7970 }
7971 }
7972 }
7973 } else {
7974 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7975 }
7976 camMetadata.update(QCAMERA3_CDS_INFO,
7977 (uint8_t *)&cdsDataOverride,
7978 sizeof(cam_cds_data_t));
7979 }
7980
7981 // Ldaf calibration data
7982 if (!mLdafCalibExist) {
7983 IF_META_AVAILABLE(uint32_t, ldafCalib,
7984 CAM_INTF_META_LDAF_EXIF, metadata) {
7985 mLdafCalibExist = true;
7986 mLdafCalib[0] = ldafCalib[0];
7987 mLdafCalib[1] = ldafCalib[1];
7988 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7989 ldafCalib[0], ldafCalib[1]);
7990 }
7991 }
7992
Thierry Strudel54dc9782017-02-15 12:12:10 -08007993 // EXIF debug data through vendor tag
7994 /*
7995 * Mobicat Mask can assume 3 values:
7996 * 1 refers to Mobicat data,
7997 * 2 refers to Stats Debug and Exif Debug Data
7998 * 3 refers to Mobicat and Stats Debug Data
7999 * We want to make sure that we are sending Exif debug data
8000 * only when Mobicat Mask is 2.
8001 */
8002 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8003 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8004 (uint8_t *)(void *)mExifParams.debug_params,
8005 sizeof(mm_jpeg_debug_exif_params_t));
8006 }
8007
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008008 // Reprocess and DDM debug data through vendor tag
8009 cam_reprocess_info_t repro_info;
8010 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008011 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8012 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008013 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008014 }
8015 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8016 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008017 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008018 }
8019 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8020 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008021 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008022 }
8023 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8024 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008025 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008026 }
8027 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8028 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008029 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008030 }
8031 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008032 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008033 }
8034 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8035 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008036 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008037 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008038 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8039 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8040 }
8041 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8042 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8043 }
8044 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8045 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008046
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008047 // INSTANT AEC MODE
8048 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8049 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8050 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8051 }
8052
Shuzhen Wange763e802016-03-31 10:24:29 -07008053 // AF scene change
8054 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8055 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8056 }
8057
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008058 // Enable ZSL
8059 if (enableZsl != nullptr) {
8060 uint8_t value = *enableZsl ?
8061 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8062 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8063 }
8064
Xu Han821ea9c2017-05-23 09:00:40 -07008065 // OIS Data
8066 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8067 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8068 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8069 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8070 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8071 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8072 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8073 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8074 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8075 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8076 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8077 }
8078
Thierry Strudel3d639192016-09-09 11:52:26 -07008079 resultMetadata = camMetadata.release();
8080 return resultMetadata;
8081}
8082
8083/*===========================================================================
8084 * FUNCTION : saveExifParams
8085 *
8086 * DESCRIPTION:
 8087 * DESCRIPTION: Cache per-module EXIF debug parameters from the metadata callback
8088 * PARAMETERS :
8089 * @metadata : metadata information from callback
8090 *
8091 * RETURN : none
8092 *
8093 *==========================================================================*/
8094void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8095{
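    // Cache each debug-data block that is present (AE, AWB, AF, ASD, stats,
    // BE stats, bhist, 3A tuning) into mExifParams.debug_params and mark it
    // valid so it can later be embedded as EXIF debug data.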
8096 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8097 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8098 if (mExifParams.debug_params) {
8099 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8100 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8101 }
8102 }
8103 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8104 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8105 if (mExifParams.debug_params) {
8106 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8107 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8108 }
8109 }
8110 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8111 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8112 if (mExifParams.debug_params) {
8113 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8114 mExifParams.debug_params->af_debug_params_valid = TRUE;
8115 }
8116 }
8117 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8118 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8119 if (mExifParams.debug_params) {
8120 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8121 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8122 }
8123 }
8124 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8125 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8126 if (mExifParams.debug_params) {
8127 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8128 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8129 }
8130 }
8131 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8132 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8133 if (mExifParams.debug_params) {
8134 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8135 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8136 }
8137 }
8138 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8139 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8140 if (mExifParams.debug_params) {
8141 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8142 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8143 }
8144 }
8145 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8146 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8147 if (mExifParams.debug_params) {
8148 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8149 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8150 }
8151 }
8152}
8153
8154/*===========================================================================
8155 * FUNCTION : get3AExifParams
8156 *
 8157 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
8158 *
8159 * PARAMETERS : none
8160 *
8161 *
8162 * RETURN : mm_jpeg_exif_params_t
8163 *
8164 *==========================================================================*/
8165mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8166{
8167 return mExifParams;
8168}
8169
8170/*===========================================================================
8171 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8172 *
 8173 * DESCRIPTION: Translate urgent (partial) HAL metadata into framework result metadata
8174 *
8175 * PARAMETERS :
8176 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008177 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8178 * urgent metadata in a batch. Always true for
8179 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008180 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008181 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8182 * i.e. even though it doesn't map to a valid partial
8183 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008184 * RETURN : camera_metadata_t*
8185 * metadata in a format specified by fwk
8186 *==========================================================================*/
8187camera_metadata_t*
8188QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008189 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008190 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008191{
8192 CameraMetadata camMetadata;
8193 camera_metadata_t *resultMetadata;
8194
Shuzhen Wang485e2442017-08-02 12:21:08 -07008195 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008196 /* In batch mode, use empty metadata if this is not the last in batch
8197 */
8198 resultMetadata = allocate_camera_metadata(0, 0);
8199 return resultMetadata;
8200 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008201
8202 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8203 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8204 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8205 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8206 }
8207
8208 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8209 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8210 &aecTrigger->trigger, 1);
8211 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8212 &aecTrigger->trigger_id, 1);
8213 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8214 aecTrigger->trigger);
8215 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8216 aecTrigger->trigger_id);
8217 }
8218
8219 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8220 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8221 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8222 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8223 }
8224
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008225 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8226 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8227 if (NAME_NOT_FOUND != val) {
8228 uint8_t fwkAfMode = (uint8_t)val;
8229 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8230 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8231 } else {
8232 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8233 val);
8234 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008235 }
8236
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008237 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8238 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8239 af_trigger->trigger);
8240 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8241 af_trigger->trigger_id);
8242
8243 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8244 mAfTrigger = *af_trigger;
8245 uint32_t fwk_AfState = (uint32_t) *afState;
8246
8247 // If this is the result for a new trigger, check if there is new early
8248 // af state. If there is, use the last af state for all results
8249 // preceding current partial frame number.
8250 for (auto & pendingRequest : mPendingRequestsList) {
8251 if (pendingRequest.frame_number < frame_number) {
8252 pendingRequest.focusStateValid = true;
8253 pendingRequest.focusState = fwk_AfState;
8254 } else if (pendingRequest.frame_number == frame_number) {
8255 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8256 // Check if early AF state for trigger exists. If yes, send AF state as
8257 // partial result for better latency.
8258 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8259 pendingRequest.focusStateSent = true;
8260 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8261 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8262 frame_number, fwkEarlyAfState);
8263 }
8264 }
8265 }
8266 }
8267 }
8268 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8269 &mAfTrigger.trigger, 1);
8270 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8271
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008272 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8273 /*af regions*/
8274 int32_t afRegions[REGIONS_TUPLE_COUNT];
 8275        // Adjust the AF region from the sensor output coordinate system to the active
8276 // array coordinate system.
8277 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8278 hAfRegions->rect.width, hAfRegions->rect.height);
8279
8280 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8281 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8282 REGIONS_TUPLE_COUNT);
8283 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8284 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8285 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8286 hAfRegions->rect.height);
8287 }
8288
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008289 // AF region confidence
8290 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8291 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8292 }
8293
Thierry Strudel3d639192016-09-09 11:52:26 -07008294 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8295 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8296 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8297 if (NAME_NOT_FOUND != val) {
8298 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8299 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8300 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8301 } else {
8302 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8303 }
8304 }
8305
8306 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8307 uint32_t aeMode = CAM_AE_MODE_MAX;
8308 int32_t flashMode = CAM_FLASH_MODE_MAX;
8309 int32_t redeye = -1;
8310 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8311 aeMode = *pAeMode;
8312 }
8313 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8314 flashMode = *pFlashMode;
8315 }
8316 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8317 redeye = *pRedeye;
8318 }
8319
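    // Deduce ANDROID_CONTROL_AE_MODE in priority order: red-eye reduction,
    // then auto/on flash, then plain AE on/off, then external flash.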
8320 if (1 == redeye) {
8321 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8322 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8323 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8324 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8325 flashMode);
8326 if (NAME_NOT_FOUND != val) {
8327 fwk_aeMode = (uint8_t)val;
8328 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8329 } else {
8330 LOGE("Unsupported flash mode %d", flashMode);
8331 }
8332 } else if (aeMode == CAM_AE_MODE_ON) {
8333 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8334 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8335 } else if (aeMode == CAM_AE_MODE_OFF) {
8336 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8337 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008338 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8339 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8340 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008341 } else {
8342 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8343 "flashMode:%d, aeMode:%u!!!",
8344 redeye, flashMode, aeMode);
8345 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008346 if (mInstantAEC) {
 8347        // Increment the frame index count until the bound is reached for instant AEC.
8348 mInstantAecFrameIdxCount++;
8349 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8350 CAM_INTF_META_AEC_INFO, metadata) {
8351 LOGH("ae_params->settled = %d",ae_params->settled);
8352 // If AEC settled, or if number of frames reached bound value,
8353 // should reset instant AEC.
8354 if (ae_params->settled ||
8355 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8356 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8357 mInstantAEC = false;
8358 mResetInstantAEC = true;
8359 mInstantAecFrameIdxCount = 0;
8360 }
8361 }
8362 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008363 resultMetadata = camMetadata.release();
8364 return resultMetadata;
8365}
8366
8367/*===========================================================================
8368 * FUNCTION : dumpMetadataToFile
8369 *
8370 * DESCRIPTION: Dumps tuning metadata to file system
8371 *
8372 * PARAMETERS :
8373 * @meta : tuning metadata
8374 * @dumpFrameCount : current dump frame count
8375 * @enabled : Enable mask
8376 *
8377 *==========================================================================*/
8378void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8379 uint32_t &dumpFrameCount,
8380 bool enabled,
8381 const char *type,
8382 uint32_t frameNumber)
8383{
8384 //Some sanity checks
8385 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8386 LOGE("Tuning sensor data size bigger than expected %d: %d",
8387 meta.tuning_sensor_data_size,
8388 TUNING_SENSOR_DATA_MAX);
8389 return;
8390 }
8391
8392 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8393 LOGE("Tuning VFE data size bigger than expected %d: %d",
8394 meta.tuning_vfe_data_size,
8395 TUNING_VFE_DATA_MAX);
8396 return;
8397 }
8398
8399 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8400 LOGE("Tuning CPP data size bigger than expected %d: %d",
8401 meta.tuning_cpp_data_size,
8402 TUNING_CPP_DATA_MAX);
8403 return;
8404 }
8405
8406 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8407 LOGE("Tuning CAC data size bigger than expected %d: %d",
8408 meta.tuning_cac_data_size,
8409 TUNING_CAC_DATA_MAX);
8410 return;
8411 }
8412 //
8413
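    // The dump file uses the same layout as the tuning metadata blob:
    // a version word, five size words (mod3 forced to 0), then the sensor,
    // VFE, CPP and CAC data segments.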
8414 if(enabled){
8415 char timeBuf[FILENAME_MAX];
8416 char buf[FILENAME_MAX];
8417 memset(buf, 0, sizeof(buf));
8418 memset(timeBuf, 0, sizeof(timeBuf));
8419 time_t current_time;
8420 struct tm * timeinfo;
8421 time (&current_time);
8422 timeinfo = localtime (&current_time);
8423 if (timeinfo != NULL) {
8424 strftime (timeBuf, sizeof(timeBuf),
8425 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8426 }
8427 String8 filePath(timeBuf);
8428 snprintf(buf,
8429 sizeof(buf),
8430 "%dm_%s_%d.bin",
8431 dumpFrameCount,
8432 type,
8433 frameNumber);
8434 filePath.append(buf);
8435 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8436 if (file_fd >= 0) {
8437 ssize_t written_len = 0;
8438 meta.tuning_data_version = TUNING_DATA_VERSION;
8439 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8440 written_len += write(file_fd, data, sizeof(uint32_t));
8441 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8442 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8443 written_len += write(file_fd, data, sizeof(uint32_t));
8444 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8445 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8446 written_len += write(file_fd, data, sizeof(uint32_t));
8447 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8448 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8449 written_len += write(file_fd, data, sizeof(uint32_t));
8450 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8451 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8452 written_len += write(file_fd, data, sizeof(uint32_t));
8453 meta.tuning_mod3_data_size = 0;
8454 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8455 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8456 written_len += write(file_fd, data, sizeof(uint32_t));
8457 size_t total_size = meta.tuning_sensor_data_size;
8458 data = (void *)((uint8_t *)&meta.data);
8459 written_len += write(file_fd, data, total_size);
8460 total_size = meta.tuning_vfe_data_size;
8461 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8462 written_len += write(file_fd, data, total_size);
8463 total_size = meta.tuning_cpp_data_size;
8464 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8465 written_len += write(file_fd, data, total_size);
8466 total_size = meta.tuning_cac_data_size;
8467 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8468 written_len += write(file_fd, data, total_size);
8469 close(file_fd);
8470 }else {
8471 LOGE("fail to open file for metadata dumping");
 8472            LOGE("failed to open file for metadata dumping");
8473 }
8474}
8475
8476/*===========================================================================
8477 * FUNCTION : cleanAndSortStreamInfo
8478 *
8479 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8480 * and sort them such that raw stream is at the end of the list
 8481 *              and sort them such that raw streams are at the end of the list.
 8482 *              This is a workaround for a camera daemon constraint.
8483 * PARAMETERS : None
8484 *
8485 *==========================================================================*/
8486void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8487{
8488 List<stream_info_t *> newStreamInfo;
8489
8490 /*clean up invalid streams*/
8491 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8492 it != mStreamInfo.end();) {
8493 if(((*it)->status) == INVALID){
8494 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8495 delete channel;
8496 free(*it);
8497 it = mStreamInfo.erase(it);
8498 } else {
8499 it++;
8500 }
8501 }
8502
8503 // Move preview/video/callback/snapshot streams into newList
8504 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8505 it != mStreamInfo.end();) {
8506 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8507 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8508 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8509 newStreamInfo.push_back(*it);
8510 it = mStreamInfo.erase(it);
8511 } else
8512 it++;
8513 }
8514 // Move raw streams into newList
8515 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8516 it != mStreamInfo.end();) {
8517 newStreamInfo.push_back(*it);
8518 it = mStreamInfo.erase(it);
8519 }
8520
8521 mStreamInfo = newStreamInfo;
8522}
8523
8524/*===========================================================================
8525 * FUNCTION : extractJpegMetadata
8526 *
8527 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8528 * JPEG metadata is cached in HAL, and return as part of capture
8529 * result when metadata is returned from camera daemon.
8530 *
8531 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8532 * @request: capture request
8533 *
8534 *==========================================================================*/
8535void QCamera3HardwareInterface::extractJpegMetadata(
8536 CameraMetadata& jpegMetadata,
8537 const camera3_capture_request_t *request)
8538{
8539 CameraMetadata frame_settings;
8540 frame_settings = request->settings;
8541
8542 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8543 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8544 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8545 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8546
8547 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8548 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8549 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8550 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8551
8552 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8553 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8554 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8555 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8556
8557 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8558 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8559 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8560 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8561
8562 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8563 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8564 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8565 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8566
8567 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8568 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8569 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8570 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8571
8572 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8573 int32_t thumbnail_size[2];
8574 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8575 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8576 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8577 int32_t orientation =
8578 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008579 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008580 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8581 int32_t temp;
8582 temp = thumbnail_size[0];
8583 thumbnail_size[0] = thumbnail_size[1];
8584 thumbnail_size[1] = temp;
8585 }
8586 }
8587 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8588 thumbnail_size,
8589 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8590 }
8591
8592}
8593
8594/*===========================================================================
8595 * FUNCTION : convertToRegions
8596 *
8597 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8598 *
8599 * PARAMETERS :
8600 * @rect : cam_rect_t struct to convert
8601 * @region : int32_t destination array
8602 * @weight : if we are converting from cam_area_t, weight is valid
8603 * else weight = -1
8604 *
8605 *==========================================================================*/
8606void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8607 int32_t *region, int weight)
8608{
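    // e.g. rect = {left=100, top=200, width=300, height=400} with weight=5
    // yields left/top/right/bottom/weight entries of 100, 200, 400, 600, 5.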
Jason Lee8ce36fa2017-04-19 19:40:37 -07008609 region[FACE_LEFT] = rect.left;
8610 region[FACE_TOP] = rect.top;
8611 region[FACE_RIGHT] = rect.left + rect.width;
8612 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008613 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008614 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008615 }
8616}
8617
8618/*===========================================================================
8619 * FUNCTION : convertFromRegions
8620 *
 8621 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
 8622 *
 8623 * PARAMETERS :
 8624 *   @roi            : cam_area_t destination to fill
 8625 *   @frame_settings : capture request settings containing the region tag
 8626 *   @tag            : metadata tag whose data is [xmin, ymin, xmax, ymax, weight]
8628 *
8629 *==========================================================================*/
8630void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008631 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008632{
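    // e.g. tag data [100, 200, 400, 600, 5] yields
    // roi.rect = {left=100, top=200, width=300, height=400} and roi.weight = 5.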
Thierry Strudel3d639192016-09-09 11:52:26 -07008633 int32_t x_min = frame_settings.find(tag).data.i32[0];
8634 int32_t y_min = frame_settings.find(tag).data.i32[1];
8635 int32_t x_max = frame_settings.find(tag).data.i32[2];
8636 int32_t y_max = frame_settings.find(tag).data.i32[3];
8637 roi.weight = frame_settings.find(tag).data.i32[4];
8638 roi.rect.left = x_min;
8639 roi.rect.top = y_min;
8640 roi.rect.width = x_max - x_min;
8641 roi.rect.height = y_max - y_min;
8642}
8643
8644/*===========================================================================
8645 * FUNCTION : resetIfNeededROI
8646 *
8647 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8648 * crop region
8649 *
8650 * PARAMETERS :
8651 * @roi : cam_area_t struct to resize
8652 * @scalerCropRegion : cam_crop_region_t region to compare against
8653 *
8654 *
8655 *==========================================================================*/
8656bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8657 const cam_crop_region_t* scalerCropRegion)
8658{
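    // Example: roi {0, 0, 4000x3000} with weight > 0, clipped against
    // crop {100, 100, 3800x2800}, becomes {100, 100, 3800x2800};
    // a roi with weight == 0 is left untouched and reported as valid.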
8659 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8660 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8661 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8662 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8663
 8664    /* According to the spec, weight = 0 indicates that the roi should be
 8665     * disabled. Without this check, the validation below (whether the roi
 8666     * lies inside the scaler crop region) would fail, the roi would not be
 8667     * reset, and the algorithm would keep using a stale roi window.
 8668     */
8669 if (roi->weight == 0) {
8670 return true;
8671 }
8672
8673 if ((roi_x_max < scalerCropRegion->left) ||
8674 // right edge of roi window is left of scalar crop's left edge
8675 (roi_y_max < scalerCropRegion->top) ||
8676 // bottom edge of roi window is above scalar crop's top edge
8677 (roi->rect.left > crop_x_max) ||
8678 // left edge of roi window is beyond(right) of scalar crop's right edge
8679 (roi->rect.top > crop_y_max)){
 8680            // top edge of roi window is beyond (below) scalar crop's bottom edge
8681 return false;
8682 }
8683 if (roi->rect.left < scalerCropRegion->left) {
8684 roi->rect.left = scalerCropRegion->left;
8685 }
8686 if (roi->rect.top < scalerCropRegion->top) {
8687 roi->rect.top = scalerCropRegion->top;
8688 }
8689 if (roi_x_max > crop_x_max) {
8690 roi_x_max = crop_x_max;
8691 }
8692 if (roi_y_max > crop_y_max) {
8693 roi_y_max = crop_y_max;
8694 }
8695 roi->rect.width = roi_x_max - roi->rect.left;
8696 roi->rect.height = roi_y_max - roi->rect.top;
8697 return true;
8698}
8699
8700/*===========================================================================
8701 * FUNCTION : convertLandmarks
8702 *
8703 * DESCRIPTION: helper method to extract the landmarks from face detection info
8704 *
8705 * PARAMETERS :
8706 * @landmark_data : input landmark data to be converted
8707 * @landmarks : int32_t destination array
8708 *
8709 *
8710 *==========================================================================*/
8711void QCamera3HardwareInterface::convertLandmarks(
8712 cam_face_landmarks_info_t landmark_data,
8713 int32_t *landmarks)
8714{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008715 if (landmark_data.is_left_eye_valid) {
8716 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8717 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8718 } else {
8719 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8720 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8721 }
8722
8723 if (landmark_data.is_right_eye_valid) {
8724 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8725 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8726 } else {
8727 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8728 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8729 }
8730
8731 if (landmark_data.is_mouth_valid) {
8732 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8733 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8734 } else {
8735 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8736 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8737 }
8738}
8739
8740/*===========================================================================
8741 * FUNCTION : setInvalidLandmarks
8742 *
8743 * DESCRIPTION: helper method to set invalid landmarks
8744 *
8745 * PARAMETERS :
8746 * @landmarks : int32_t destination array
8747 *
8748 *
8749 *==========================================================================*/
8750void QCamera3HardwareInterface::setInvalidLandmarks(
8751 int32_t *landmarks)
8752{
8753 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8754 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8755 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8756 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8757 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8758 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008759}
8760
8761#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008762
8763/*===========================================================================
8764 * FUNCTION : getCapabilities
8765 *
8766 * DESCRIPTION: query camera capability from back-end
8767 *
8768 * PARAMETERS :
8769 * @ops : mm-interface ops structure
8770 * @cam_handle : camera handle for which we need capability
8771 *
8772 * RETURN : ptr type of capability structure
8773 * capability for success
8774 * NULL for failure
8775 *==========================================================================*/
8776cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8777 uint32_t cam_handle)
8778{
8779 int rc = NO_ERROR;
8780 QCamera3HeapMemory *capabilityHeap = NULL;
8781 cam_capability_t *cap_ptr = NULL;
8782
8783 if (ops == NULL) {
8784 LOGE("Invalid arguments");
8785 return NULL;
8786 }
8787
8788 capabilityHeap = new QCamera3HeapMemory(1);
8789 if (capabilityHeap == NULL) {
8790 LOGE("creation of capabilityHeap failed");
8791 return NULL;
8792 }
8793
8794 /* Allocate memory for capability buffer */
8795 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8796 if(rc != OK) {
8797 LOGE("No memory for cappability");
 8798        LOGE("No memory for capability");
8799 }
8800
8801 /* Map memory for capability buffer */
8802 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8803
8804 rc = ops->map_buf(cam_handle,
8805 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8806 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8807 if(rc < 0) {
8808 LOGE("failed to map capability buffer");
8809 rc = FAILED_TRANSACTION;
8810 goto map_failed;
8811 }
8812
8813 /* Query Capability */
8814 rc = ops->query_capability(cam_handle);
8815 if(rc < 0) {
8816 LOGE("failed to query capability");
8817 rc = FAILED_TRANSACTION;
8818 goto query_failed;
8819 }
8820
8821 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8822 if (cap_ptr == NULL) {
8823 LOGE("out of memory");
8824 rc = NO_MEMORY;
8825 goto query_failed;
8826 }
8827
8828 memset(cap_ptr, 0, sizeof(cam_capability_t));
8829 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8830
8831 int index;
8832 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8833 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8834 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8835 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8836 }
8837
8838query_failed:
8839 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8840map_failed:
8841 capabilityHeap->deallocate();
8842allocate_failed:
8843 delete capabilityHeap;
8844
8845 if (rc != NO_ERROR) {
8846 return NULL;
8847 } else {
8848 return cap_ptr;
8849 }
8850}
8851
Thierry Strudel3d639192016-09-09 11:52:26 -07008852/*===========================================================================
8853 * FUNCTION : initCapabilities
8854 *
8855 * DESCRIPTION: initialize camera capabilities in static data struct
8856 *
8857 * PARAMETERS :
8858 * @cameraId : camera Id
8859 *
8860 * RETURN : int32_t type of status
8861 * NO_ERROR -- success
8862 * none-zero failure code
8863 *==========================================================================*/
8864int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8865{
8866 int rc = 0;
8867 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008868 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008869
8870 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8871 if (rc) {
8872 LOGE("camera_open failed. rc = %d", rc);
8873 goto open_failed;
8874 }
8875 if (!cameraHandle) {
8876 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8877 goto open_failed;
8878 }
8879
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008880 handle = get_main_camera_handle(cameraHandle->camera_handle);
8881 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8882 if (gCamCapability[cameraId] == NULL) {
8883 rc = FAILED_TRANSACTION;
8884 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008885 }
8886
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008887 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008888 if (is_dual_camera_by_idx(cameraId)) {
8889 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8890 gCamCapability[cameraId]->aux_cam_cap =
8891 getCapabilities(cameraHandle->ops, handle);
8892 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8893 rc = FAILED_TRANSACTION;
8894 free(gCamCapability[cameraId]);
8895 goto failed_op;
8896 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008897
8898 // Copy the main camera capability to main_cam_cap struct
8899 gCamCapability[cameraId]->main_cam_cap =
8900 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8901 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8902 LOGE("out of memory");
8903 rc = NO_MEMORY;
8904 goto failed_op;
8905 }
8906 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8907 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008908 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008909failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008910 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8911 cameraHandle = NULL;
8912open_failed:
8913 return rc;
8914}
8915
8916/*==========================================================================
 8917 * FUNCTION   : get3AVersion
8918 *
8919 * DESCRIPTION: get the Q3A S/W version
8920 *
8921 * PARAMETERS :
8922 * @sw_version: Reference of Q3A structure which will hold version info upon
8923 * return
8924 *
8925 * RETURN : None
8926 *
8927 *==========================================================================*/
8928void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8929{
8930 if(gCamCapability[mCameraId])
8931 sw_version = gCamCapability[mCameraId]->q3a_version;
8932 else
8933 LOGE("Capability structure NULL!");
8934}
8935
8936
8937/*===========================================================================
8938 * FUNCTION : initParameters
8939 *
8940 * DESCRIPTION: initialize camera parameters
8941 *
8942 * PARAMETERS :
8943 *
8944 * RETURN : int32_t type of status
8945 * NO_ERROR -- success
8946 * none-zero failure code
8947 *==========================================================================*/
8948int QCamera3HardwareInterface::initParameters()
8949{
8950 int rc = 0;
8951
8952 //Allocate Set Param Buffer
8953 mParamHeap = new QCamera3HeapMemory(1);
8954 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8955 if(rc != OK) {
8956 rc = NO_MEMORY;
8957 LOGE("Failed to allocate SETPARM Heap memory");
8958 delete mParamHeap;
8959 mParamHeap = NULL;
8960 return rc;
8961 }
8962
8963 //Map memory for parameters buffer
8964 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8965 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8966 mParamHeap->getFd(0),
8967 sizeof(metadata_buffer_t),
8968 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8969 if(rc < 0) {
8970 LOGE("failed to map SETPARM buffer");
8971 rc = FAILED_TRANSACTION;
8972 mParamHeap->deallocate();
8973 delete mParamHeap;
8974 mParamHeap = NULL;
8975 return rc;
8976 }
8977
8978 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8979
8980 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8981 return rc;
8982}
8983
8984/*===========================================================================
8985 * FUNCTION : deinitParameters
8986 *
8987 * DESCRIPTION: de-initialize camera parameters
8988 *
8989 * PARAMETERS :
8990 *
8991 * RETURN : NONE
8992 *==========================================================================*/
8993void QCamera3HardwareInterface::deinitParameters()
8994{
8995 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8996 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8997
8998 mParamHeap->deallocate();
8999 delete mParamHeap;
9000 mParamHeap = NULL;
9001
9002 mParameters = NULL;
9003
9004 free(mPrevParameters);
9005 mPrevParameters = NULL;
9006}
9007
9008/*===========================================================================
9009 * FUNCTION : calcMaxJpegSize
9010 *
9011 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9012 *
9013 * PARAMETERS :
9014 *
9015 * RETURN : max_jpeg_size
9016 *==========================================================================*/
9017size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9018{
9019 size_t max_jpeg_size = 0;
9020 size_t temp_width, temp_height;
9021 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9022 MAX_SIZES_CNT);
9023 for (size_t i = 0; i < count; i++) {
9024 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9025 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9026 if (temp_width * temp_height > max_jpeg_size ) {
9027 max_jpeg_size = temp_width * temp_height;
9028 }
9029 }
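    // Heuristic: bound the worst-case JPEG size by the YUV420 footprint of the
    // largest picture size (3/2 bytes per pixel) plus the camera3 JPEG blob
    // trailer appended at the end of the buffer.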
9030 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9031 return max_jpeg_size;
9032}
9033
9034/*===========================================================================
9035 * FUNCTION : getMaxRawSize
9036 *
9037 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9038 *
9039 * PARAMETERS :
9040 *
9041 * RETURN : Largest supported Raw Dimension
9042 *==========================================================================*/
9043cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9044{
9045 int max_width = 0;
9046 cam_dimension_t maxRawSize;
9047
9048 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9049 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9050 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9051 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9052 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9053 }
9054 }
9055 return maxRawSize;
9056}
9057
9058
9059/*===========================================================================
9060 * FUNCTION : calcMaxJpegDim
9061 *
9062 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9063 *
9064 * PARAMETERS :
9065 *
9066 * RETURN : max_jpeg_dim
9067 *==========================================================================*/
9068cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9069{
9070 cam_dimension_t max_jpeg_dim;
9071 cam_dimension_t curr_jpeg_dim;
9072 max_jpeg_dim.width = 0;
9073 max_jpeg_dim.height = 0;
9074 curr_jpeg_dim.width = 0;
9075 curr_jpeg_dim.height = 0;
9076 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9077 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9078 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9079 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9080 max_jpeg_dim.width * max_jpeg_dim.height ) {
9081 max_jpeg_dim.width = curr_jpeg_dim.width;
9082 max_jpeg_dim.height = curr_jpeg_dim.height;
9083 }
9084 }
9085 return max_jpeg_dim;
9086}
9087
9088/*===========================================================================
9089 * FUNCTION : addStreamConfig
9090 *
9091 * DESCRIPTION: adds the stream configuration to the array
9092 *
9093 * PARAMETERS :
9094 * @available_stream_configs : pointer to stream configuration array
9095 * @scalar_format : scalar format
9096 * @dim : configuration dimension
9097 * @config_type : input or output configuration type
9098 *
9099 * RETURN : NONE
9100 *==========================================================================*/
9101void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9102 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9103{
9104 available_stream_configs.add(scalar_format);
9105 available_stream_configs.add(dim.width);
9106 available_stream_configs.add(dim.height);
9107 available_stream_configs.add(config_type);
9108}
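/* Illustrative use of the helper above (the 4032x3024 BLOB entry is
 * hypothetical): each call appends one four-int32 tuple to the vector, e.g.
 *
 *     cam_dimension_t dim = {4032, 3024};
 *     addStreamConfig(configs, HAL_PIXEL_FORMAT_BLOB, dim,
 *             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *     // configs now ends with: BLOB, 4032, 3024, ..._OUTPUT
 *
 * matching the flattened (format, width, height, direction) encoding used by
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS. */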
9109
9110/*===========================================================================
9111 * FUNCTION : supportBurstCapture
9112 *
9113 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9114 *
9115 * PARAMETERS :
9116 * @cameraId : camera Id
9117 *
9118 * RETURN : true if camera supports BURST_CAPTURE
9119 * false otherwise
9120 *==========================================================================*/
9121bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9122{
9123 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9124 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9125 const int32_t highResWidth = 3264;
9126 const int32_t highResHeight = 2448;
9127
9128 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9129 // Maximum resolution images cannot be captured at >= 10fps
9130 // -> not supporting BURST_CAPTURE
9131 return false;
9132 }
9133
9134 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9135 // Maximum resolution images can be captured at >= 20fps
9136 // --> supporting BURST_CAPTURE
9137 return true;
9138 }
9139
9140 // Find the smallest highRes resolution, or largest resolution if there is none
9141 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9142 MAX_SIZES_CNT);
9143 size_t highRes = 0;
9144 while ((highRes + 1 < totalCnt) &&
9145 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9146 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9147 highResWidth * highResHeight)) {
9148 highRes++;
9149 }
9150 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9151 return true;
9152 } else {
9153 return false;
9154 }
9155}
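/* Worked check of the bounds used above: picture_min_duration values are in
 * nanoseconds, so
 *
 *     1e9 / 100000000 ns = 10 fps (fullResDurationBound)
 *     1e9 / 50000000 ns  = 20 fps (highResDurationBound)
 *
 * A camera whose largest picture size cannot sustain 10 fps never advertises
 * BURST_CAPTURE; one that sustains 20 fps at full resolution always does;
 * otherwise the smallest picture size of at least 3264x2448 (~8 MP) must
 * sustain 20 fps. */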
9156
9157/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009158 * FUNCTION : getPDStatIndex
9159 *
9160 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9161 *
9162 * PARAMETERS :
9163 * @caps : camera capabilities
9164 *
9165 * RETURN : int32_t type
9166 * non-negative - on success
9167 * -1 - on failure
9168 *==========================================================================*/
9169int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9170 if (nullptr == caps) {
9171 return -1;
9172 }
9173
9174 uint32_t metaRawCount = caps->meta_raw_channel_count;
9175 int32_t ret = -1;
9176 for (size_t i = 0; i < metaRawCount; i++) {
9177 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9178 ret = i;
9179 break;
9180 }
9181 }
9182
9183 return ret;
9184}
9185
9186/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009187 * FUNCTION : initStaticMetadata
9188 *
9189 * DESCRIPTION: initialize the static metadata
9190 *
9191 * PARAMETERS :
9192 * @cameraId : camera Id
9193 *
9194 * RETURN : int32_t type of status
9195 * 0 -- success
9196 * non-zero failure code
9197 *==========================================================================*/
9198int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9199{
9200 int rc = 0;
9201 CameraMetadata staticInfo;
9202 size_t count = 0;
9203 bool limitedDevice = false;
9204 char prop[PROPERTY_VALUE_MAX];
9205 bool supportBurst = false;
9206
9207 supportBurst = supportBurstCapture(cameraId);
9208
9209 /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9210 * guaranteed, or if the min fps at max resolution is less than 20 fps, the device
9211 * is advertised as a limited device */
9212 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9213 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9214 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9215 !supportBurst;
9216
9217 uint8_t supportedHwLvl = limitedDevice ?
9218 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009219#ifndef USE_HAL_3_3
9220 // LEVEL_3 - This device will support level 3.
9221 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9222#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009223 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009224#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009225
9226 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9227 &supportedHwLvl, 1);
9228
9229 bool facingBack = false;
9230 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9231 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9232 facingBack = true;
9233 }
9234 /*HAL 3 only*/
9235 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9236 &gCamCapability[cameraId]->min_focus_distance, 1);
9237
9238 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9239 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9240
9241 /*should be using the full list of focal lengths, but the sensor doesn't provide that info yet*/
9242 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9243 &gCamCapability[cameraId]->focal_length,
9244 1);
9245
9246 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9247 gCamCapability[cameraId]->apertures,
9248 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9249
9250 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9251 gCamCapability[cameraId]->filter_densities,
9252 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9253
9254
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009255 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9256 size_t mode_count =
9257 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9258 for (size_t i = 0; i < mode_count; i++) {
9259 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9260 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009261 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009262 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009263
9264 int32_t lens_shading_map_size[] = {
9265 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9266 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9267 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9268 lens_shading_map_size,
9269 sizeof(lens_shading_map_size)/sizeof(int32_t));
9270
9271 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9272 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9273
9274 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9275 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9276
9277 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9278 &gCamCapability[cameraId]->max_frame_duration, 1);
9279
9280 camera_metadata_rational baseGainFactor = {
9281 gCamCapability[cameraId]->base_gain_factor.numerator,
9282 gCamCapability[cameraId]->base_gain_factor.denominator};
9283 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9284 &baseGainFactor, 1);
9285
9286 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9287 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9288
9289 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9290 gCamCapability[cameraId]->pixel_array_size.height};
9291 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9292 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9293
9294 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9295 gCamCapability[cameraId]->active_array_size.top,
9296 gCamCapability[cameraId]->active_array_size.width,
9297 gCamCapability[cameraId]->active_array_size.height};
9298 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9299 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9300
9301 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9302 &gCamCapability[cameraId]->white_level, 1);
9303
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009304 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9305 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9306 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009307 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009308 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009309
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009310#ifndef USE_HAL_3_3
9311 bool hasBlackRegions = false;
9312 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9313 LOGW("black_region_count: %d is bounded to %d",
9314 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9315 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9316 }
9317 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9318 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9319 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9320 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9321 }
9322 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9323 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9324 hasBlackRegions = true;
9325 }
9326#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009327 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9328 &gCamCapability[cameraId]->flash_charge_duration, 1);
9329
9330 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9331 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9332
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009333 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9334 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9335 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009336 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9337 &timestampSource, 1);
9338
Thierry Strudel54dc9782017-02-15 12:12:10 -08009339 //update histogram vendor data
9340 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009341 &gCamCapability[cameraId]->histogram_size, 1);
9342
Thierry Strudel54dc9782017-02-15 12:12:10 -08009343 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009344 &gCamCapability[cameraId]->max_histogram_count, 1);
9345
Shuzhen Wang14415f52016-11-16 18:26:18 -08009346 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9347 //so that the app can request fewer bins than the maximum supported.
9348 std::vector<int32_t> histBins;
9349 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9350 histBins.push_back(maxHistBins);
9351 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9352 (maxHistBins & 0x1) == 0) {
9353 histBins.push_back(maxHistBins >> 1);
9354 maxHistBins >>= 1;
9355 }
9356 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9357 histBins.data(), histBins.size());
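/* Illustrative expansion of the loop above, assuming a hypothetical
 * max_histogram_count of 256 and a MIN_CAM_HISTOGRAM_STATS_SIZE of 32:
 * histBins is built as {256, 128, 64, 32}, halving while the next value
 * stays at or above the minimum and the current value is still even. */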
9358
Thierry Strudel3d639192016-09-09 11:52:26 -07009359 int32_t sharpness_map_size[] = {
9360 gCamCapability[cameraId]->sharpness_map_size.width,
9361 gCamCapability[cameraId]->sharpness_map_size.height};
9362
9363 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9364 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9365
9366 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9367 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9368
Emilian Peev0f3c3162017-03-15 12:57:46 +00009369 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9370 if (0 <= indexPD) {
9371 // Advertise PD stats data as part of the Depth capabilities
9372 int32_t depthWidth =
9373 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9374 int32_t depthHeight =
9375 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009376 int32_t depthStride =
9377 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009378 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9379 assert(0 < depthSamplesCount);
9380 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9381 &depthSamplesCount, 1);
9382
9383 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9384 depthHeight,
9385 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9386 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9387 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9388 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9389 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9390
9391 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9392 depthHeight, 33333333,
9393 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9394 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9395 depthMinDuration,
9396 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9397
9398 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9399 depthHeight, 0,
9400 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9401 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9402 depthStallDuration,
9403 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9404
9405 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9406 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009407
9408 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9409 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9410 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009411 }
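/* Worked example of the PD stats sizing above (the 496x496 meta raw
 * dimension is hypothetical): with 2 bytes per RAW16 pixel,
 *
 *     depthStride       = 496 * 2              = 992 bytes per row
 *     depthSamplesCount = (496 * 496 * 2) / 16 = 30752
 *
 * and those derived values feed the advertised RAW16/BLOB depth stream
 * configurations, minimum frame durations and stall durations. */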
9412
Thierry Strudel3d639192016-09-09 11:52:26 -07009413 int32_t scalar_formats[] = {
9414 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9415 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9416 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9417 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9418 HAL_PIXEL_FORMAT_RAW10,
9419 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009420 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9421 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9422 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009423
9424 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9425 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9426 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9427 count, MAX_SIZES_CNT, available_processed_sizes);
9428 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9429 available_processed_sizes, count * 2);
9430
9431 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9432 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9433 makeTable(gCamCapability[cameraId]->raw_dim,
9434 count, MAX_SIZES_CNT, available_raw_sizes);
9435 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9436 available_raw_sizes, count * 2);
9437
9438 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9439 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9440 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9441 count, MAX_SIZES_CNT, available_fps_ranges);
9442 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9443 available_fps_ranges, count * 2);
9444
9445 camera_metadata_rational exposureCompensationStep = {
9446 gCamCapability[cameraId]->exp_compensation_step.numerator,
9447 gCamCapability[cameraId]->exp_compensation_step.denominator};
9448 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9449 &exposureCompensationStep, 1);
9450
9451 Vector<uint8_t> availableVstabModes;
9452 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9453 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009454 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009455 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009456 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009457 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009458 count = IS_TYPE_MAX;
9459 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9460 for (size_t i = 0; i < count; i++) {
9461 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9462 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9463 eisSupported = true;
9464 break;
9465 }
9466 }
9467 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009468 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9469 }
9470 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9471 availableVstabModes.array(), availableVstabModes.size());
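/* The video stabilization list above is gated both by sensor support for
 * EIS 2.0/3.0 and by a persist property; e.g. (assuming adb access to a
 * debuggable build):
 *
 *     adb shell setprop persist.camera.eis.enable 0
 *
 * would leave only VIDEO_STABILIZATION_MODE_OFF advertised even on an
 * EIS-capable back camera. */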
9472
9473 /*HAL 1 and HAL 3 common*/
9474 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9475 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9476 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009477 // Cap the max zoom to the max preferred value
9478 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009479 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9480 &maxZoom, 1);
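/* Worked example of the digital zoom cap above (the table values are
 * hypothetical): with a zoom_ratio_tbl whose last entry is 600 and the
 * HAL1-style minimum step of 100, the raw ratio is 600 / 100 = 6, which is
 * then clamped to MAX_PREFERRED_ZOOM_RATIO before being published as
 * ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM. */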
9481
9482 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9483 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9484
9485 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9486 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9487 max3aRegions[2] = 0; /* AF not supported */
9488 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9489 max3aRegions, 3);
9490
9491 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9492 memset(prop, 0, sizeof(prop));
9493 property_get("persist.camera.facedetect", prop, "1");
9494 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9495 LOGD("Support face detection mode: %d",
9496 supportedFaceDetectMode);
9497
9498 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009499 /* supported mode should be OFF if the max number of faces is 0 */
9500 if (maxFaces <= 0) {
9501 supportedFaceDetectMode = 0;
9502 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009503 Vector<uint8_t> availableFaceDetectModes;
9504 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9505 if (supportedFaceDetectMode == 1) {
9506 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9507 } else if (supportedFaceDetectMode == 2) {
9508 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9509 } else if (supportedFaceDetectMode == 3) {
9510 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9511 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9512 } else {
9513 maxFaces = 0;
9514 }
9515 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9516 availableFaceDetectModes.array(),
9517 availableFaceDetectModes.size());
9518 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9519 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009520 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9521 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9522 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009523
9524 int32_t exposureCompensationRange[] = {
9525 gCamCapability[cameraId]->exposure_compensation_min,
9526 gCamCapability[cameraId]->exposure_compensation_max};
9527 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9528 exposureCompensationRange,
9529 sizeof(exposureCompensationRange)/sizeof(int32_t));
9530
9531 uint8_t lensFacing = (facingBack) ?
9532 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9533 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9534
9535 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9536 available_thumbnail_sizes,
9537 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9538
9539 /*all sizes will be clubbed into this tag*/
9540 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9541 /*android.scaler.availableStreamConfigurations*/
9542 Vector<int32_t> available_stream_configs;
9543 cam_dimension_t active_array_dim;
9544 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9545 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009546
9547 /*advertise the list of supported input dimensions based on the property below.
9548 By default all sizes up to 5MP will be advertised.
9549 Note that the setprop resolution format should be WxH.
9550 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9551 To list all supported sizes, setprop needs to be set with "0x0" */
9552 cam_dimension_t minInputSize = {2592,1944}; //5MP
9553 memset(prop, 0, sizeof(prop));
9554 property_get("persist.camera.input.minsize", prop, "2592x1944");
9555 if (strlen(prop) > 0) {
9556 char *saveptr = NULL;
9557 char *token = strtok_r(prop, "x", &saveptr);
9558 if (token != NULL) {
9559 minInputSize.width = atoi(token);
9560 }
9561 token = strtok_r(NULL, "x", &saveptr);
9562 if (token != NULL) {
9563 minInputSize.height = atoi(token);
9564 }
9565 }
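/* Standalone sketch of the "WxH" parsing done above (the helper name is
 * hypothetical and not part of this HAL):
 *
 *     static cam_dimension_t parseMinInputSize(char *prop, cam_dimension_t def) {
 *         char *saveptr = NULL;
 *         char *tok = strtok_r(prop, "x", &saveptr);  // width
 *         if (tok != NULL) def.width = atoi(tok);
 *         tok = strtok_r(NULL, "x", &saveptr);        // height
 *         if (tok != NULL) def.height = atoi(tok);
 *         return def;
 *     }
 *
 * e.g. "1280x720" yields {1280, 720}, while "0x0" yields {0, 0} so that no
 * picture size is filtered out of the input configurations. */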
9566
Thierry Strudel3d639192016-09-09 11:52:26 -07009567 /* Add input/output stream configurations for each scalar formats*/
9568 for (size_t j = 0; j < scalar_formats_count; j++) {
9569 switch (scalar_formats[j]) {
9570 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9571 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9572 case HAL_PIXEL_FORMAT_RAW10:
9573 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9574 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9575 addStreamConfig(available_stream_configs, scalar_formats[j],
9576 gCamCapability[cameraId]->raw_dim[i],
9577 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9578 }
9579 break;
9580 case HAL_PIXEL_FORMAT_BLOB:
9581 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9582 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9583 addStreamConfig(available_stream_configs, scalar_formats[j],
9584 gCamCapability[cameraId]->picture_sizes_tbl[i],
9585 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9586 }
9587 break;
9588 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9589 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9590 default:
9591 cam_dimension_t largest_picture_size;
9592 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9593 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9594 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9595 addStreamConfig(available_stream_configs, scalar_formats[j],
9596 gCamCapability[cameraId]->picture_sizes_tbl[i],
9597 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009598 /*For the 2 formats below we also support input streams for reprocessing; advertise those*/
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009599 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9600 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009601 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9602 >= minInputSize.width) || (gCamCapability[cameraId]->
9603 picture_sizes_tbl[i].height >= minInputSize.height)) {
9604 addStreamConfig(available_stream_configs, scalar_formats[j],
9605 gCamCapability[cameraId]->picture_sizes_tbl[i],
9606 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9607 }
9608 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009609 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009610
Thierry Strudel3d639192016-09-09 11:52:26 -07009611 break;
9612 }
9613 }
9614
9615 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9616 available_stream_configs.array(), available_stream_configs.size());
9617 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9618 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9619
9620 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9621 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9622
9623 /* android.scaler.availableMinFrameDurations */
9624 Vector<int64_t> available_min_durations;
9625 for (size_t j = 0; j < scalar_formats_count; j++) {
9626 switch (scalar_formats[j]) {
9627 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9628 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9629 case HAL_PIXEL_FORMAT_RAW10:
9630 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9631 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9632 available_min_durations.add(scalar_formats[j]);
9633 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9634 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9635 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9636 }
9637 break;
9638 default:
9639 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9640 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9641 available_min_durations.add(scalar_formats[j]);
9642 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9643 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9644 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9645 }
9646 break;
9647 }
9648 }
9649 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9650 available_min_durations.array(), available_min_durations.size());
9651
9652 Vector<int32_t> available_hfr_configs;
9653 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9654 int32_t fps = 0;
9655 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9656 case CAM_HFR_MODE_60FPS:
9657 fps = 60;
9658 break;
9659 case CAM_HFR_MODE_90FPS:
9660 fps = 90;
9661 break;
9662 case CAM_HFR_MODE_120FPS:
9663 fps = 120;
9664 break;
9665 case CAM_HFR_MODE_150FPS:
9666 fps = 150;
9667 break;
9668 case CAM_HFR_MODE_180FPS:
9669 fps = 180;
9670 break;
9671 case CAM_HFR_MODE_210FPS:
9672 fps = 210;
9673 break;
9674 case CAM_HFR_MODE_240FPS:
9675 fps = 240;
9676 break;
9677 case CAM_HFR_MODE_480FPS:
9678 fps = 480;
9679 break;
9680 case CAM_HFR_MODE_OFF:
9681 case CAM_HFR_MODE_MAX:
9682 default:
9683 break;
9684 }
9685
9686 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9687 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9688 /* For each HFR frame rate, need to advertise one variable fps range
9689 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9690 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9691 * set by the app. When video recording is started, [120, 120] is
9692 * set. This way sensor configuration does not change when recording
9693 * is started */
9694
9695 /* (width, height, fps_min, fps_max, batch_size_max) */
9696 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9697 j < MAX_SIZES_CNT; j++) {
9698 available_hfr_configs.add(
9699 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9700 available_hfr_configs.add(
9701 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9702 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9703 available_hfr_configs.add(fps);
9704 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9705
9706 /* (width, height, fps_min, fps_max, batch_size_max) */
9707 available_hfr_configs.add(
9708 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9709 available_hfr_configs.add(
9710 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9711 available_hfr_configs.add(fps);
9712 available_hfr_configs.add(fps);
9713 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9714 }
9715 }
9716 }
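/* Illustrative expansion of the HFR entries above, assuming a hypothetical
 * 1920x1080 @ 120 fps table entry and PREVIEW_FPS_FOR_HFR == 30; the two
 * (width, height, fps_min, fps_max, batch_size_max) tuples appended are:
 *
 *     1920, 1080,  30, 120, 4   // variable range while only preview runs
 *     1920, 1080, 120, 120, 4   // fixed range once recording starts
 */
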
9717 //Advertise HFR capability only if the property is set
9718 memset(prop, 0, sizeof(prop));
9719 property_get("persist.camera.hal3hfr.enable", prop, "1");
9720 uint8_t hfrEnable = (uint8_t)atoi(prop);
9721
9722 if(hfrEnable && available_hfr_configs.array()) {
9723 staticInfo.update(
9724 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9725 available_hfr_configs.array(), available_hfr_configs.size());
9726 }
9727
9728 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9729 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9730 &max_jpeg_size, 1);
9731
9732 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9733 size_t size = 0;
9734 count = CAM_EFFECT_MODE_MAX;
9735 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9736 for (size_t i = 0; i < count; i++) {
9737 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9738 gCamCapability[cameraId]->supported_effects[i]);
9739 if (NAME_NOT_FOUND != val) {
9740 avail_effects[size] = (uint8_t)val;
9741 size++;
9742 }
9743 }
9744 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9745 avail_effects,
9746 size);
9747
9748 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9749 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9750 size_t supported_scene_modes_cnt = 0;
9751 count = CAM_SCENE_MODE_MAX;
9752 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9753 for (size_t i = 0; i < count; i++) {
9754 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9755 CAM_SCENE_MODE_OFF) {
9756 int val = lookupFwkName(SCENE_MODES_MAP,
9757 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9758 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009759
Thierry Strudel3d639192016-09-09 11:52:26 -07009760 if (NAME_NOT_FOUND != val) {
9761 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9762 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9763 supported_scene_modes_cnt++;
9764 }
9765 }
9766 }
9767 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9768 avail_scene_modes,
9769 supported_scene_modes_cnt);
9770
9771 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9772 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9773 supported_scene_modes_cnt,
9774 CAM_SCENE_MODE_MAX,
9775 scene_mode_overrides,
9776 supported_indexes,
9777 cameraId);
9778
9779 if (supported_scene_modes_cnt == 0) {
9780 supported_scene_modes_cnt = 1;
9781 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9782 }
9783
9784 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9785 scene_mode_overrides, supported_scene_modes_cnt * 3);
9786
9787 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9788 ANDROID_CONTROL_MODE_AUTO,
9789 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9790 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9791 available_control_modes,
9792 3);
9793
9794 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9795 size = 0;
9796 count = CAM_ANTIBANDING_MODE_MAX;
9797 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9798 for (size_t i = 0; i < count; i++) {
9799 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9800 gCamCapability[cameraId]->supported_antibandings[i]);
9801 if (NAME_NOT_FOUND != val) {
9802 avail_antibanding_modes[size] = (uint8_t)val;
9803 size++;
9804 }
9805
9806 }
9807 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9808 avail_antibanding_modes,
9809 size);
9810
9811 uint8_t avail_abberation_modes[] = {
9812 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9813 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9814 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9815 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9816 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9817 if (0 == count) {
9818 // If no aberration correction modes are available for a device, advertise only the OFF mode
9819 size = 1;
9820 } else {
9821 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9822 // So, advertise all 3 modes if at least one mode is supported, as per the
9823 // new M requirement
9824 size = 3;
9825 }
9826 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9827 avail_abberation_modes,
9828 size);
9829
9830 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9831 size = 0;
9832 count = CAM_FOCUS_MODE_MAX;
9833 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9834 for (size_t i = 0; i < count; i++) {
9835 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9836 gCamCapability[cameraId]->supported_focus_modes[i]);
9837 if (NAME_NOT_FOUND != val) {
9838 avail_af_modes[size] = (uint8_t)val;
9839 size++;
9840 }
9841 }
9842 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9843 avail_af_modes,
9844 size);
9845
9846 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9847 size = 0;
9848 count = CAM_WB_MODE_MAX;
9849 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9850 for (size_t i = 0; i < count; i++) {
9851 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9852 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9853 gCamCapability[cameraId]->supported_white_balances[i]);
9854 if (NAME_NOT_FOUND != val) {
9855 avail_awb_modes[size] = (uint8_t)val;
9856 size++;
9857 }
9858 }
9859 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9860 avail_awb_modes,
9861 size);
9862
9863 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9864 count = CAM_FLASH_FIRING_LEVEL_MAX;
9865 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9866 count);
9867 for (size_t i = 0; i < count; i++) {
9868 available_flash_levels[i] =
9869 gCamCapability[cameraId]->supported_firing_levels[i];
9870 }
9871 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9872 available_flash_levels, count);
9873
9874 uint8_t flashAvailable;
9875 if (gCamCapability[cameraId]->flash_available)
9876 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9877 else
9878 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9879 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9880 &flashAvailable, 1);
9881
9882 Vector<uint8_t> avail_ae_modes;
9883 count = CAM_AE_MODE_MAX;
9884 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9885 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009886 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9887 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9888 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9889 }
9890 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009891 }
9892 if (flashAvailable) {
9893 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9894 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9895 }
9896 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9897 avail_ae_modes.array(),
9898 avail_ae_modes.size());
9899
9900 int32_t sensitivity_range[2];
9901 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9902 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9903 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9904 sensitivity_range,
9905 sizeof(sensitivity_range) / sizeof(int32_t));
9906
9907 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9908 &gCamCapability[cameraId]->max_analog_sensitivity,
9909 1);
9910
9911 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9912 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9913 &sensor_orientation,
9914 1);
9915
9916 int32_t max_output_streams[] = {
9917 MAX_STALLING_STREAMS,
9918 MAX_PROCESSED_STREAMS,
9919 MAX_RAW_STREAMS};
9920 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9921 max_output_streams,
9922 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9923
9924 uint8_t avail_leds = 0;
9925 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9926 &avail_leds, 0);
9927
9928 uint8_t focus_dist_calibrated;
9929 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9930 gCamCapability[cameraId]->focus_dist_calibrated);
9931 if (NAME_NOT_FOUND != val) {
9932 focus_dist_calibrated = (uint8_t)val;
9933 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9934 &focus_dist_calibrated, 1);
9935 }
9936
9937 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9938 size = 0;
9939 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9940 MAX_TEST_PATTERN_CNT);
9941 for (size_t i = 0; i < count; i++) {
9942 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9943 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9944 if (NAME_NOT_FOUND != testpatternMode) {
9945 avail_testpattern_modes[size] = testpatternMode;
9946 size++;
9947 }
9948 }
9949 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9950 avail_testpattern_modes,
9951 size);
9952
9953 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9954 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9955 &max_pipeline_depth,
9956 1);
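/* Worked example of the pipeline depth above, using purely hypothetical
 * values MAX_INFLIGHT_REQUESTS = 6, EMPTY_PIPELINE_DELAY = 2 and
 * FRAME_SKIP_DELAY = 0: the advertised ANDROID_REQUEST_PIPELINE_MAX_DEPTH
 * would be 6 + 2 + 0 = 8. */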
9957
9958 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9959 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9960 &partial_result_count,
9961 1);
9962
9963 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9964 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9965
9966 Vector<uint8_t> available_capabilities;
9967 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9968 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9969 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9970 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9971 if (supportBurst) {
9972 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9973 }
9974 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9975 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9976 if (hfrEnable && available_hfr_configs.array()) {
9977 available_capabilities.add(
9978 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9979 }
9980
9981 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9982 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9983 }
9984 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9985 available_capabilities.array(),
9986 available_capabilities.size());
9987
9988 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9989 //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9990 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9991 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9992
9993 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9994 &aeLockAvailable, 1);
9995
9996 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9997 //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9998 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9999 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10000
10001 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10002 &awbLockAvailable, 1);
10003
10004 int32_t max_input_streams = 1;
10005 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10006 &max_input_streams,
10007 1);
10008
10009 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10010 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10011 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10012 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10013 HAL_PIXEL_FORMAT_YCbCr_420_888};
10014 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10015 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
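/* Decoded view of io_format_map above, following its documented
 * (input format, num_output_formats, outputFormat1, ...) encoding:
 *
 *     IMPLEMENTATION_DEFINED -> { BLOB, YCbCr_420_888 }
 *     YCbCr_420_888          -> { BLOB, YCbCr_420_888 }
 *
 * i.e. both reprocessable input formats can produce JPEG (BLOB) or YUV
 * outputs. */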
10016
10017 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10018 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10019 &max_latency,
10020 1);
10021
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010022#ifndef USE_HAL_3_3
10023 int32_t isp_sensitivity_range[2];
10024 isp_sensitivity_range[0] =
10025 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10026 isp_sensitivity_range[1] =
10027 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10028 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10029 isp_sensitivity_range,
10030 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10031#endif
10032
Thierry Strudel3d639192016-09-09 11:52:26 -070010033 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10034 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10035 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10036 available_hot_pixel_modes,
10037 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10038
10039 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10040 ANDROID_SHADING_MODE_FAST,
10041 ANDROID_SHADING_MODE_HIGH_QUALITY};
10042 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10043 available_shading_modes,
10044 3);
10045
10046 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10047 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10048 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10049 available_lens_shading_map_modes,
10050 2);
10051
10052 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10053 ANDROID_EDGE_MODE_FAST,
10054 ANDROID_EDGE_MODE_HIGH_QUALITY,
10055 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10056 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10057 available_edge_modes,
10058 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10059
10060 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10061 ANDROID_NOISE_REDUCTION_MODE_FAST,
10062 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10063 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10064 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10065 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10066 available_noise_red_modes,
10067 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10068
10069 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10070 ANDROID_TONEMAP_MODE_FAST,
10071 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10072 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10073 available_tonemap_modes,
10074 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10075
10076 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10077 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10078 available_hot_pixel_map_modes,
10079 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10080
10081 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10082 gCamCapability[cameraId]->reference_illuminant1);
10083 if (NAME_NOT_FOUND != val) {
10084 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10085 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10086 }
10087
10088 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10089 gCamCapability[cameraId]->reference_illuminant2);
10090 if (NAME_NOT_FOUND != val) {
10091 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10092 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10093 }
10094
10095 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10096 (void *)gCamCapability[cameraId]->forward_matrix1,
10097 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10098
10099 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10100 (void *)gCamCapability[cameraId]->forward_matrix2,
10101 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10102
10103 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10104 (void *)gCamCapability[cameraId]->color_transform1,
10105 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10106
10107 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10108 (void *)gCamCapability[cameraId]->color_transform2,
10109 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10110
10111 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10112 (void *)gCamCapability[cameraId]->calibration_transform1,
10113 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10114
10115 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10116 (void *)gCamCapability[cameraId]->calibration_transform2,
10117 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10118
10119 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10120 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10121 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10122 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10123 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10124 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10125 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10126 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10127 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10128 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10129 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10130 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10131 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10132 ANDROID_JPEG_GPS_COORDINATES,
10133 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10134 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10135 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10136 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10137 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10138 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10139 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10140 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10141 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10142 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010143#ifndef USE_HAL_3_3
10144 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10145#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010146 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010147 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010148 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10149 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010150 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010151 /* DevCamDebug metadata request_keys_basic */
10152 DEVCAMDEBUG_META_ENABLE,
10153 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010154 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010155 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010156 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010157 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010158 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010159 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010160
10161 size_t request_keys_cnt =
10162 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10163 Vector<int32_t> available_request_keys;
10164 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10165 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10166 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10167 }
10168
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010169 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010170 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10171 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10172 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010173 }
10174
Thierry Strudel3d639192016-09-09 11:52:26 -070010175 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10176 available_request_keys.array(), available_request_keys.size());
10177
10178 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10179 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10180 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10181 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10182 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10183 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10184 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10185 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10186 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10187 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10188 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10189 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10190 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10191 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10192 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10193 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10194 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010195 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010196 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10197 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10198 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010199 ANDROID_STATISTICS_FACE_SCORES,
10200#ifndef USE_HAL_3_3
10201 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10202#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010203 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010204 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010205 // DevCamDebug metadata result_keys_basic
10206 DEVCAMDEBUG_META_ENABLE,
10207 // DevCamDebug metadata result_keys AF
10208 DEVCAMDEBUG_AF_LENS_POSITION,
10209 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10210 DEVCAMDEBUG_AF_TOF_DISTANCE,
10211 DEVCAMDEBUG_AF_LUMA,
10212 DEVCAMDEBUG_AF_HAF_STATE,
10213 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10214 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10215 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10216 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10217 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10218 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10219 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10220 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10221 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10222 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10223 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10224 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10225 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10226 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10227 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10228 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10229 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10230 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10231 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10232 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10233 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10234 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10235 // DevCamDebug metadata result_keys AEC
10236 DEVCAMDEBUG_AEC_TARGET_LUMA,
10237 DEVCAMDEBUG_AEC_COMP_LUMA,
10238 DEVCAMDEBUG_AEC_AVG_LUMA,
10239 DEVCAMDEBUG_AEC_CUR_LUMA,
10240 DEVCAMDEBUG_AEC_LINECOUNT,
10241 DEVCAMDEBUG_AEC_REAL_GAIN,
10242 DEVCAMDEBUG_AEC_EXP_INDEX,
10243 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010244 // DevCamDebug metadata result_keys zzHDR
10245 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10246 DEVCAMDEBUG_AEC_L_LINECOUNT,
10247 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10248 DEVCAMDEBUG_AEC_S_LINECOUNT,
10249 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10250 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10251 // DevCamDebug metadata result_keys ADRC
10252 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10253 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10254 DEVCAMDEBUG_AEC_GTM_RATIO,
10255 DEVCAMDEBUG_AEC_LTM_RATIO,
10256 DEVCAMDEBUG_AEC_LA_RATIO,
10257 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010258 // DevCamDebug metadata result_keys AEC MOTION
10259 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10260 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10261 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010262 // DevCamDebug metadata result_keys AWB
10263 DEVCAMDEBUG_AWB_R_GAIN,
10264 DEVCAMDEBUG_AWB_G_GAIN,
10265 DEVCAMDEBUG_AWB_B_GAIN,
10266 DEVCAMDEBUG_AWB_CCT,
10267 DEVCAMDEBUG_AWB_DECISION,
10268 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010269 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10270 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10271 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010272 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010273 };
10274
Thierry Strudel3d639192016-09-09 11:52:26 -070010275 size_t result_keys_cnt =
10276 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10277
10278 Vector<int32_t> available_result_keys;
10279 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10280 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10281 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10282 }
10283 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10284 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10285 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10286 }
10287 if (supportedFaceDetectMode == 1) {
10288 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10289 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10290 } else if ((supportedFaceDetectMode == 2) ||
10291 (supportedFaceDetectMode == 3)) {
10292 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10293 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10294 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010295#ifndef USE_HAL_3_3
10296 if (hasBlackRegions) {
10297 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10298 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10299 }
10300#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010301
10302 if (gExposeEnableZslKey) {
10303 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10304 }
10305
Thierry Strudel3d639192016-09-09 11:52:26 -070010306 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10307 available_result_keys.array(), available_result_keys.size());
10308
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010309 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010310 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10311 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10312 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10313 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10314 ANDROID_SCALER_CROPPING_TYPE,
10315 ANDROID_SYNC_MAX_LATENCY,
10316 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10317 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10318 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10319 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10320 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10321 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10322 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10323 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10324 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10325 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10326 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10327 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10328 ANDROID_LENS_FACING,
10329 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10330 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10331 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10332 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10333 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10334 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10335 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10336 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10337 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10338 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10339 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10340 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10341 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10342 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10343 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10344 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10345 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10346 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10347 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10348 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010349 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010350 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10351 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10352 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10353 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10354 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10355 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10356 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10357 ANDROID_CONTROL_AVAILABLE_MODES,
10358 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10359 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10360 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10361 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010362 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10363#ifndef USE_HAL_3_3
10364 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10365 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10366#endif
10367 };
10368
10369 Vector<int32_t> available_characteristics_keys;
10370 available_characteristics_keys.appendArray(characteristics_keys_basic,
10371 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10372#ifndef USE_HAL_3_3
10373 if (hasBlackRegions) {
10374 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10375 }
10376#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010377
10378 if (0 <= indexPD) {
10379 int32_t depthKeys[] = {
10380 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10381 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10382 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10383 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10384 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10385 };
10386 available_characteristics_keys.appendArray(depthKeys,
10387 sizeof(depthKeys) / sizeof(depthKeys[0]));
10388 }
10389
Thierry Strudel3d639192016-09-09 11:52:26 -070010390 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010391 available_characteristics_keys.array(),
10392 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010393
10394 /*available stall durations depend on the hw + sw and will be different for different devices */
10395 /*have to add for raw after implementation*/
10396 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10397 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10398
10399 Vector<int64_t> available_stall_durations;
10400 for (uint32_t j = 0; j < stall_formats_count; j++) {
10401 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10402 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10403 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10404 available_stall_durations.add(stall_formats[j]);
10405 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10406 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10407 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10408 }
10409 } else {
10410 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10411 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10412 available_stall_durations.add(stall_formats[j]);
10413 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10414 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10415 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10416 }
10417 }
10418 }
10419 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10420 available_stall_durations.array(),
10421 available_stall_durations.size());
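    // The stall-duration table above is a flat list of (format, width, height,
    // stall_duration_ns) quadruples, one entry per advertised size. Illustrative
    // values only (real durations come from gCamCapability), e.g.:
    //   { HAL_PIXEL_FORMAT_BLOB,                    4032, 3024, 300000000 }  // ~300 ms JPEG stall
    //   { ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,   4032, 3024,  33333333 }  // ~33 ms RAW16 stall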
10422
10423 //QCAMERA3_OPAQUE_RAW
10424 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10425 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10426 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10427 case LEGACY_RAW:
10428 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10429 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10430 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10431 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10432 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10433 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10434 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10435 break;
10436 case MIPI_RAW:
10437 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10438 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10439 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10440 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10441 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10442 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10443 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10444 break;
10445 default:
10446 LOGE("unknown opaque_raw_format %d",
10447 gCamCapability[cameraId]->opaque_raw_fmt);
10448 break;
10449 }
10450 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10451
10452 Vector<int32_t> strides;
10453 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10454 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10455 cam_stream_buf_plane_info_t buf_planes;
10456 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10457 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10458 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10459 &gCamCapability[cameraId]->padding_info, &buf_planes);
10460 strides.add(buf_planes.plane_info.mp[0].stride);
10461 }
10462 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10463 strides.size());
10464
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010465 //TBD: remove the following line once backend advertises zzHDR in feature mask
10466 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010467 //Video HDR default
10468 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10469 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010470 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010471 int32_t vhdr_mode[] = {
10472 QCAMERA3_VIDEO_HDR_MODE_OFF,
10473 QCAMERA3_VIDEO_HDR_MODE_ON};
10474
10475 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10476 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10477 vhdr_mode, vhdr_mode_count);
10478 }
10479
Thierry Strudel3d639192016-09-09 11:52:26 -070010480 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10481 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10482 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10483
10484 uint8_t isMonoOnly =
10485 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10486 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10487 &isMonoOnly, 1);
10488
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010489#ifndef USE_HAL_3_3
10490 Vector<int32_t> opaque_size;
10491 for (size_t j = 0; j < scalar_formats_count; j++) {
10492 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10493 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10494 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10495 cam_stream_buf_plane_info_t buf_planes;
10496
10497 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10498 &gCamCapability[cameraId]->padding_info, &buf_planes);
10499
10500 if (rc == 0) {
10501 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10502 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10503 opaque_size.add(buf_planes.plane_info.frame_len);
10504 } else {
10505 LOGE("raw frame calculation failed!");
10506 }
10507 }
10508 }
10509 }
10510
10511 if ((opaque_size.size() > 0) &&
10512 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10513 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10514 else
10515 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10516#endif
10517
Thierry Strudel04e026f2016-10-10 11:27:36 -070010518 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10519 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10520 size = 0;
10521 count = CAM_IR_MODE_MAX;
10522 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10523 for (size_t i = 0; i < count; i++) {
10524 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10525 gCamCapability[cameraId]->supported_ir_modes[i]);
10526 if (NAME_NOT_FOUND != val) {
10527 avail_ir_modes[size] = (int32_t)val;
10528 size++;
10529 }
10530 }
10531 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10532 avail_ir_modes, size);
10533 }
10534
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010535 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10536 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10537 size = 0;
10538 count = CAM_AEC_CONVERGENCE_MAX;
10539 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10540 for (size_t i = 0; i < count; i++) {
10541 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10542 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10543 if (NAME_NOT_FOUND != val) {
10544 available_instant_aec_modes[size] = (int32_t)val;
10545 size++;
10546 }
10547 }
10548 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10549 available_instant_aec_modes, size);
10550 }
10551
Thierry Strudel54dc9782017-02-15 12:12:10 -080010552 int32_t sharpness_range[] = {
10553 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10554 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10555 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10556
10557 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10558 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10559 size = 0;
10560 count = CAM_BINNING_CORRECTION_MODE_MAX;
10561 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10562 for (size_t i = 0; i < count; i++) {
10563 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10564 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10565 gCamCapability[cameraId]->supported_binning_modes[i]);
10566 if (NAME_NOT_FOUND != val) {
10567 avail_binning_modes[size] = (int32_t)val;
10568 size++;
10569 }
10570 }
10571 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10572 avail_binning_modes, size);
10573 }
10574
10575 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10576 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10577 size = 0;
10578 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10579 for (size_t i = 0; i < count; i++) {
10580 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10581 gCamCapability[cameraId]->supported_aec_modes[i]);
10582 if (NAME_NOT_FOUND != val)
10583 available_aec_modes[size++] = val;
10584 }
10585 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10586 available_aec_modes, size);
10587 }
10588
10589 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10590 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10591 size = 0;
10592 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10593 for (size_t i = 0; i < count; i++) {
10594 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10595 gCamCapability[cameraId]->supported_iso_modes[i]);
10596 if (NAME_NOT_FOUND != val)
10597 available_iso_modes[size++] = val;
10598 }
10599 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10600 available_iso_modes, size);
10601 }
10602
10603 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010604 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010605 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10606 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10607 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10608
10609 int32_t available_saturation_range[4];
10610 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10611 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10612 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10613 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10614 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10615 available_saturation_range, 4);
10616
10617 uint8_t is_hdr_values[2];
10618 is_hdr_values[0] = 0;
10619 is_hdr_values[1] = 1;
10620 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10621 is_hdr_values, 2);
10622
10623 float is_hdr_confidence_range[2];
10624 is_hdr_confidence_range[0] = 0.0;
10625 is_hdr_confidence_range[1] = 1.0;
10626 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10627 is_hdr_confidence_range, 2);
10628
Emilian Peev0a972ef2017-03-16 10:25:53 +000010629 size_t eepromLength = strnlen(
10630 reinterpret_cast<const char *>(
10631 gCamCapability[cameraId]->eeprom_version_info),
10632 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10633 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010634 char easelInfo[] = ",E:N";
10635 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10636 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10637 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010638 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10639 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010640 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010641 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010642 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10643 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10644 }
10645
Thierry Strudel3d639192016-09-09 11:52:26 -070010646 gStaticMetadata[cameraId] = staticInfo.release();
10647 return rc;
10648}
10649
10650/*===========================================================================
10651 * FUNCTION : makeTable
10652 *
10653 * DESCRIPTION: make a table of sizes
10654 *
10655 * PARAMETERS :
10656 * @dimTable / @size : source dimension table and its entry count (clamped to @max_size)
10657 * @sizeTable : output array receiving flattened (width, height) pairs
10658 *==========================================================================*/
10659void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10660 size_t max_size, int32_t *sizeTable)
10661{
10662 size_t j = 0;
10663 if (size > max_size) {
10664 size = max_size;
10665 }
10666 for (size_t i = 0; i < size; i++) {
10667 sizeTable[j] = dimTable[i].width;
10668 sizeTable[j+1] = dimTable[i].height;
10669 j+=2;
10670 }
10671}
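// Illustrative use of makeTable() with hypothetical values: a dimTable of
// {{1920,1080},{1280,720}} with size = 2 is flattened into
// sizeTable = {1920, 1080, 1280, 720}, the (width, height) pair layout the
// framework expects for size lists.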
10672
10673/*===========================================================================
10674 * FUNCTION : makeFPSTable
10675 *
10676 * DESCRIPTION: make a table of fps ranges
10677 *
10678 * PARAMETERS :
10679 * @fpsTable / @size : fps range table and entry count (clamped to @max_size); @fpsRangesTable : flattened (min_fps, max_fps) output
10680 *==========================================================================*/
10681void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10682 size_t max_size, int32_t *fpsRangesTable)
10683{
10684 size_t j = 0;
10685 if (size > max_size) {
10686 size = max_size;
10687 }
10688 for (size_t i = 0; i < size; i++) {
10689 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10690 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10691 j+=2;
10692 }
10693}
10694
10695/*===========================================================================
10696 * FUNCTION : makeOverridesList
10697 *
10698 * DESCRIPTION: make a list of scene mode overrides
10699 *
10700 * PARAMETERS :
10701 * @overridesTable / @size : scene mode override table and entry count (clamped to @max_size)
10702 * @overridesList / @supported_indexes / @camera_id : output (ae, awb, af) triplets, framework-supported scene indexes, camera id
10703 *==========================================================================*/
10704void QCamera3HardwareInterface::makeOverridesList(
10705 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10706 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10707{
10708 /* The daemon gives a list of overrides for all scene modes.
10709 However, we should send the framework only the overrides for the
10710 scene modes it supports. */
10711 size_t j = 0;
10712 if (size > max_size) {
10713 size = max_size;
10714 }
10715 size_t focus_count = CAM_FOCUS_MODE_MAX;
10716 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10717 focus_count);
10718 for (size_t i = 0; i < size; i++) {
10719 bool supt = false;
10720 size_t index = supported_indexes[i];
10721 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10722 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10723 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10724 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10725 overridesTable[index].awb_mode);
10726 if (NAME_NOT_FOUND != val) {
10727 overridesList[j+1] = (uint8_t)val;
10728 }
10729 uint8_t focus_override = overridesTable[index].af_mode;
10730 for (size_t k = 0; k < focus_count; k++) {
10731 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10732 supt = true;
10733 break;
10734 }
10735 }
10736 if (supt) {
10737 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10738 focus_override);
10739 if (NAME_NOT_FOUND != val) {
10740 overridesList[j+2] = (uint8_t)val;
10741 }
10742 } else {
10743 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10744 }
10745 j+=3;
10746 }
10747}
10748
10749/*===========================================================================
10750 * FUNCTION : filterJpegSizes
10751 *
10752 * DESCRIPTION: Returns the supported JPEG sizes, restricted to sizes no smaller
10753 * than the active array dimensions divided by 'downscale_factor'
10754 *
10755 * PARAMETERS :
10756 * @jpegSizes / @processedSizes : output and input (width, height) arrays; @active_array_size, @downscale_factor : define the minimum acceptable size
10757 * RETURN : length of jpegSizes array
10758 *==========================================================================*/
10759
10760size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10761 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10762 uint8_t downscale_factor)
10763{
10764 if (0 == downscale_factor) {
10765 downscale_factor = 1;
10766 }
10767
10768 int32_t min_width = active_array_size.width / downscale_factor;
10769 int32_t min_height = active_array_size.height / downscale_factor;
10770 size_t jpegSizesCnt = 0;
10771 if (processedSizesCnt > maxCount) {
10772 processedSizesCnt = maxCount;
10773 }
10774 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10775 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10776 jpegSizes[jpegSizesCnt] = processedSizes[i];
10777 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10778 jpegSizesCnt += 2;
10779 }
10780 }
10781 return jpegSizesCnt;
10782}
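// Illustrative example with hypothetical numbers: for an active array of
// 4000x3000 and downscale_factor = 4, min_width/min_height become 1000x750,
// so only processed sizes of at least 1000x750 are copied into jpegSizes.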
10783
10784/*===========================================================================
10785 * FUNCTION : computeNoiseModelEntryS
10786 *
10787 * DESCRIPTION: function to map a given sensitivity to the S noise
10788 * model parameters in the DNG noise model.
10789 *
10790 * PARAMETERS : sens : the sensor sensitivity
10791 *
10792 * RETURN : S (sensor amplification) noise
10793 *
10794 *==========================================================================*/
10795double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10796 double s = gCamCapability[mCameraId]->gradient_S * sens +
10797 gCamCapability[mCameraId]->offset_S;
10798 return ((s < 0.0) ? 0.0 : s);
10799}
10800
10801/*===========================================================================
10802 * FUNCTION : computeNoiseModelEntryO
10803 *
10804 * DESCRIPTION: function to map a given sensitivity to the O noise
10805 * model parameters in the DNG noise model.
10806 *
10807 * PARAMETERS : sens : the sensor sensitivity
10808 *
10809 * RETURN : O (sensor readout) noise
10810 *
10811 *==========================================================================*/
10812double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10813 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10814 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10815 1.0 : (1.0 * sens / max_analog_sens);
10816 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10817 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10818 return ((o < 0.0) ? 0.0 : o);
10819}
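// The S and O values computed above are the per-sensitivity coefficients of the
// DNG noise model advertised via ANDROID_SENSOR_NOISE_PROFILE, where the noise of
// a normalized pixel value x is modeled as N(x) = sqrt(S*x + O). Illustrative
// only: with hypothetical calibration data gradient_S = 3e-6 and offset_S = 2e-5,
// a sensitivity of 400 gives S = 3e-6 * 400 + 2e-5 = 1.22e-3.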
10820
10821/*===========================================================================
10822 * FUNCTION : getSensorSensitivity
10823 *
10824 * DESCRIPTION: convert iso_mode to an integer value
10825 *
10826 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10827 *
10828 * RETURN : sensitivity supported by sensor
10829 *
10830 *==========================================================================*/
10831int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10832{
10833 int32_t sensitivity;
10834
10835 switch (iso_mode) {
10836 case CAM_ISO_MODE_100:
10837 sensitivity = 100;
10838 break;
10839 case CAM_ISO_MODE_200:
10840 sensitivity = 200;
10841 break;
10842 case CAM_ISO_MODE_400:
10843 sensitivity = 400;
10844 break;
10845 case CAM_ISO_MODE_800:
10846 sensitivity = 800;
10847 break;
10848 case CAM_ISO_MODE_1600:
10849 sensitivity = 1600;
10850 break;
10851 default:
10852 sensitivity = -1;
10853 break;
10854 }
10855 return sensitivity;
10856}
10857
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010858int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010859 if (gEaselManagerClient == nullptr) {
10860 gEaselManagerClient = EaselManagerClient::create();
10861 if (gEaselManagerClient == nullptr) {
10862 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10863 return -ENODEV;
10864 }
10865 }
10866
10867 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010868 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10869 // to connect to Easel.
10870 bool doNotpowerOnEasel =
10871 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10872
10873 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010874 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10875 return OK;
10876 }
10877
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010878 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010879 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010880 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010881 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010882 return res;
10883 }
10884
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010885 EaselManagerClientOpened = true;
10886
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010887 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010888 if (res != OK) {
10889 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10890 }
10891
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010892 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010893 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010894
10895 // Expose enableZsl key only when HDR+ mode is enabled.
10896 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010897 }
10898
10899 return OK;
10900}
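// Summary of the HDR+/Easel property knobs read above (behavior as implemented
// in initHdrPlusClientLocked(); defaults in parentheses):
//   camera.hdrplus.donotpoweroneasel (false) - leave Easel powered off so HDR+
//                                              tests can connect to it directly.
//   persist.camera.hdrplus.enable    (false) - when false, Easel is used in
//                                              bypass-only mode and the
//                                              ANDROID_CONTROL_ENABLE_ZSL key is
//                                              not exposed.
//   persist.camera.hdrplus.profiling (false) - enable HDR+ profiling.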
10901
Thierry Strudel3d639192016-09-09 11:52:26 -070010902/*===========================================================================
10903 * FUNCTION : getCamInfo
10904 *
10905 * DESCRIPTION: query camera capabilities
10906 *
10907 * PARAMETERS :
10908 * @cameraId : camera Id
10909 * @info : camera info struct to be filled in with camera capabilities
10910 *
10911 * RETURN : int type of status
10912 * NO_ERROR -- success
10913 * non-zero failure code
10914 *==========================================================================*/
10915int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10916 struct camera_info *info)
10917{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010918 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010919 int rc = 0;
10920
10921 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010922
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010923 {
10924 Mutex::Autolock l(gHdrPlusClientLock);
10925 rc = initHdrPlusClientLocked();
10926 if (rc != OK) {
10927 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10928 pthread_mutex_unlock(&gCamLock);
10929 return rc;
10930 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010931 }
10932
Thierry Strudel3d639192016-09-09 11:52:26 -070010933 if (NULL == gCamCapability[cameraId]) {
10934 rc = initCapabilities(cameraId);
10935 if (rc < 0) {
10936 pthread_mutex_unlock(&gCamLock);
10937 return rc;
10938 }
10939 }
10940
10941 if (NULL == gStaticMetadata[cameraId]) {
10942 rc = initStaticMetadata(cameraId);
10943 if (rc < 0) {
10944 pthread_mutex_unlock(&gCamLock);
10945 return rc;
10946 }
10947 }
10948
10949 switch(gCamCapability[cameraId]->position) {
10950 case CAM_POSITION_BACK:
10951 case CAM_POSITION_BACK_AUX:
10952 info->facing = CAMERA_FACING_BACK;
10953 break;
10954
10955 case CAM_POSITION_FRONT:
10956 case CAM_POSITION_FRONT_AUX:
10957 info->facing = CAMERA_FACING_FRONT;
10958 break;
10959
10960 default:
10961 LOGE("Unknown position type %d for camera id:%d",
10962 gCamCapability[cameraId]->position, cameraId);
10963 rc = -1;
10964 break;
10965 }
10966
10967
10968 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010969#ifndef USE_HAL_3_3
10970 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10971#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010972 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010973#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010974 info->static_camera_characteristics = gStaticMetadata[cameraId];
10975
10976 //For now assume both cameras can operate independently.
10977 info->conflicting_devices = NULL;
10978 info->conflicting_devices_length = 0;
10979
10980 //resource cost is 100 * MIN(1.0, m/M),
10981 //where m is throughput requirement with maximum stream configuration
10982 //and M is CPP maximum throughput.
10983 float max_fps = 0.0;
10984 for (uint32_t i = 0;
10985 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10986 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10987 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10988 }
10989 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10990 gCamCapability[cameraId]->active_array_size.width *
10991 gCamCapability[cameraId]->active_array_size.height * max_fps /
10992 gCamCapability[cameraId]->max_pixel_bandwidth;
10993 info->resource_cost = 100 * MIN(1.0, ratio);
10994 LOGI("camera %d resource cost is %d", cameraId,
10995 info->resource_cost);
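    // Worked example with hypothetical capability values: MAX_PROCESSED_STREAMS = 2,
    // a 4032x3024 active array, max_fps = 30 and max_pixel_bandwidth = 1.2e9 give
    // ratio = 2 * 4032 * 3024 * 30 / 1.2e9 ~= 0.61, i.e. resource_cost ~= 61.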
10996
10997 pthread_mutex_unlock(&gCamLock);
10998 return rc;
10999}
11000
11001/*===========================================================================
11002 * FUNCTION : translateCapabilityToMetadata
11003 *
11004 * DESCRIPTION: translate the capability into camera_metadata_t
11005 *
11006 * PARAMETERS : type of the request
11007 *
11008 *
11009 * RETURN : success: camera_metadata_t*
11010 * failure: NULL
11011 *
11012 *==========================================================================*/
11013camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11014{
11015 if (mDefaultMetadata[type] != NULL) {
11016 return mDefaultMetadata[type];
11017 }
11018 //first time we are handling this request
11019 //fill up the metadata structure using the wrapper class
11020 CameraMetadata settings;
11021 //translate from cam_capability_t to camera_metadata_tag_t
11022 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11023 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11024 int32_t defaultRequestID = 0;
11025 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11026
11027 /* OIS disable */
11028 char ois_prop[PROPERTY_VALUE_MAX];
11029 memset(ois_prop, 0, sizeof(ois_prop));
11030 property_get("persist.camera.ois.disable", ois_prop, "0");
11031 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11032
11033 /* Force video to use OIS */
11034 char videoOisProp[PROPERTY_VALUE_MAX];
11035 memset(videoOisProp, 0, sizeof(videoOisProp));
11036 property_get("persist.camera.ois.video", videoOisProp, "1");
11037 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011038
11039 // Hybrid AE enable/disable
11040 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11041 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11042 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11043 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11044
Thierry Strudel3d639192016-09-09 11:52:26 -070011045 uint8_t controlIntent = 0;
11046 uint8_t focusMode;
11047 uint8_t vsMode;
11048 uint8_t optStabMode;
11049 uint8_t cacMode;
11050 uint8_t edge_mode;
11051 uint8_t noise_red_mode;
11052 uint8_t tonemap_mode;
11053 bool highQualityModeEntryAvailable = FALSE;
11054 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011055 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011056 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11057 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011058 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011059 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011060 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011061
Thierry Strudel3d639192016-09-09 11:52:26 -070011062 switch (type) {
11063 case CAMERA3_TEMPLATE_PREVIEW:
11064 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11065 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11066 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11067 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11068 edge_mode = ANDROID_EDGE_MODE_FAST;
11069 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11070 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11071 break;
11072 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11073 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11074 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11075 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11076 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11077 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11078 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11079 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11080 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11081 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11082 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11083 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11084 highQualityModeEntryAvailable = TRUE;
11085 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11086 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11087 fastModeEntryAvailable = TRUE;
11088 }
11089 }
11090 if (highQualityModeEntryAvailable) {
11091 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11092 } else if (fastModeEntryAvailable) {
11093 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11094 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011095 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11096 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11097 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011098 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011099 break;
11100 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11101 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11102 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11103 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011104 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11105 edge_mode = ANDROID_EDGE_MODE_FAST;
11106 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11107 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11108 if (forceVideoOis)
11109 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11110 break;
11111 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11112 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11113 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11114 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011115 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11116 edge_mode = ANDROID_EDGE_MODE_FAST;
11117 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11118 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11119 if (forceVideoOis)
11120 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11121 break;
11122 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11123 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11124 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11125 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11126 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11127 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11128 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11129 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11130 break;
11131 case CAMERA3_TEMPLATE_MANUAL:
11132 edge_mode = ANDROID_EDGE_MODE_FAST;
11133 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11134 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11135 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11136 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11137 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11138 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11139 break;
11140 default:
11141 edge_mode = ANDROID_EDGE_MODE_FAST;
11142 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11143 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11144 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11145 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11146 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11147 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11148 break;
11149 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011150 // Set CAC to OFF if the underlying device doesn't support it
11151 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11152 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11153 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011154 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11155 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11156 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11157 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11158 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11159 }
11160 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011161 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011162 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011163
11164 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11165 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11166 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11167 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11168 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11169 || ois_disable)
11170 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11171 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011172 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011173
11174 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11175 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11176
11177 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11178 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11179
11180 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11181 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11182
11183 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11184 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11185
11186 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11187 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11188
11189 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11190 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11191
11192 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11193 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11194
11195 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11196 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11197
11198 /*flash*/
11199 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11200 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11201
11202 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11203 settings.update(ANDROID_FLASH_FIRING_POWER,
11204 &flashFiringLevel, 1);
11205
11206 /* lens */
11207 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11208 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11209
11210 if (gCamCapability[mCameraId]->filter_densities_count) {
11211 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11212 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11213 gCamCapability[mCameraId]->filter_densities_count);
11214 }
11215
11216 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11217 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11218
Thierry Strudel3d639192016-09-09 11:52:26 -070011219 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11220 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11221
11222 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11223 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11224
11225 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11226 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11227
11228 /* face detection (default to OFF) */
11229 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11230 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11231
Thierry Strudel54dc9782017-02-15 12:12:10 -080011232 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11233 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011234
11235 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11236 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11237
11238 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11239 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11240
Thierry Strudel3d639192016-09-09 11:52:26 -070011241
11242 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11243 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11244
11245 /* Exposure time (update to the min exposure time) */
11246 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11247 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11248
11249 /* frame duration */
11250 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11251 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11252
11253 /* sensitivity */
11254 static const int32_t default_sensitivity = 100;
11255 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011256#ifndef USE_HAL_3_3
11257 static const int32_t default_isp_sensitivity =
11258 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11259 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11260#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011261
11262 /*edge mode*/
11263 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11264
11265 /*noise reduction mode*/
11266 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11267
11268 /*color correction mode*/
11269 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11270 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11271
11272 /*tonemap mode*/
11273 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11274
11275 int32_t scaler_crop_region[4];
11276 scaler_crop_region[0] = 0;
11277 scaler_crop_region[1] = 0;
11278 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11279 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11280 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11281
11282 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11283 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11284
11285 /*focus distance*/
11286 float focus_distance = 0.0;
11287 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11288
11289 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011290 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011291 float max_range = 0.0;
11292 float max_fixed_fps = 0.0;
11293 int32_t fps_range[2] = {0, 0};
11294 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11295 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011296 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11297 TEMPLATE_MAX_PREVIEW_FPS) {
11298 continue;
11299 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011300 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11301 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11302 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11303 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11304 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11305 if (range > max_range) {
11306 fps_range[0] =
11307 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11308 fps_range[1] =
11309 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11310 max_range = range;
11311 }
11312 } else {
11313 if (range < 0.01 && max_fixed_fps <
11314 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11315 fps_range[0] =
11316 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11317 fps_range[1] =
11318 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11319 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11320 }
11321 }
11322 }
11323 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
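    // Example of the selection above, assuming a hypothetical fps table of
    // {[15,30], [30,30], [7.5,30]}: preview/still/ZSL templates pick the widest
    // range (7.5-30, stored as [7, 30] after the int cast), while the remaining
    // templates pick the highest fixed range [30, 30].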
11324
11325 /*precapture trigger*/
11326 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11327 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11328
11329 /*af trigger*/
11330 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11331 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11332
11333 /* ae & af regions */
11334 int32_t active_region[] = {
11335 gCamCapability[mCameraId]->active_array_size.left,
11336 gCamCapability[mCameraId]->active_array_size.top,
11337 gCamCapability[mCameraId]->active_array_size.left +
11338 gCamCapability[mCameraId]->active_array_size.width,
11339 gCamCapability[mCameraId]->active_array_size.top +
11340 gCamCapability[mCameraId]->active_array_size.height,
11341 0};
11342 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11343 sizeof(active_region) / sizeof(active_region[0]));
11344 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11345 sizeof(active_region) / sizeof(active_region[0]));
11346
11347 /* black level lock */
11348 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11349 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11350
Thierry Strudel3d639192016-09-09 11:52:26 -070011351 //special defaults for manual template
11352 if (type == CAMERA3_TEMPLATE_MANUAL) {
11353 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11354 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11355
11356 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11357 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11358
11359 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11360 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11361
11362 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11363 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11364
11365 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11366 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11367
11368 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11369 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11370 }
11371
11372
11373 /* TNR
11374 * We'll use this location to determine for which templates TNR will be set.
11375 * We enable TNR if either the preview or the video stream requires TNR.
11376 * This is not to be confused with per-stream linking; that decision is
11377 * still made per session and is handled as part of stream configuration.
11378 */
11379 uint8_t tnr_enable = 0;
11380
11381 if (m_bTnrPreview || m_bTnrVideo) {
11382
11383 switch (type) {
11384 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11385 tnr_enable = 1;
11386 break;
11387
11388 default:
11389 tnr_enable = 0;
11390 break;
11391 }
11392
11393 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11394 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11395 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11396
11397 LOGD("TNR:%d with process plate %d for template:%d",
11398 tnr_enable, tnr_process_type, type);
11399 }
11400
11401 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011402 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011403 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11404
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011405 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011406 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11407
Shuzhen Wang920ea402017-05-03 08:49:39 -070011408 uint8_t related_camera_id = mCameraId;
11409 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011410
11411 /* CDS default */
11412 char prop[PROPERTY_VALUE_MAX];
11413 memset(prop, 0, sizeof(prop));
11414 property_get("persist.camera.CDS", prop, "Auto");
11415 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11416 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11417 if (CAM_CDS_MODE_MAX == cds_mode) {
11418 cds_mode = CAM_CDS_MODE_AUTO;
11419 }
11420
11421 /* Disabling CDS in templates which have TNR enabled*/
11422 if (tnr_enable)
11423 cds_mode = CAM_CDS_MODE_OFF;
11424
11425 int32_t mode = cds_mode;
11426 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011427
Thierry Strudel269c81a2016-10-12 12:13:59 -070011428 /* Manual Convergence AEC Speed is disabled by default*/
11429 float default_aec_speed = 0;
11430 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11431
11432 /* Manual Convergence AWB Speed is disabled by default*/
11433 float default_awb_speed = 0;
11434 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11435
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011436 // Set instant AEC to normal convergence by default
11437 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11438 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11439
Shuzhen Wang19463d72016-03-08 11:09:52 -080011440 /* hybrid ae */
11441 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11442
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011443 if (gExposeEnableZslKey) {
11444 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11445 }
11446
Thierry Strudel3d639192016-09-09 11:52:26 -070011447 mDefaultMetadata[type] = settings.release();
11448
11449 return mDefaultMetadata[type];
11450}
11451
11452/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011453 * FUNCTION : getExpectedFrameDuration
11454 *
11455 * DESCRIPTION: Extract the maximum expected frame duration from either the
11456 * exposure time or the frame duration setting, whichever is larger
11457 *
11458 * PARAMETERS :
11459 * @request : request settings
11460 * @frameDuration : The maximum frame duration in nanoseconds
11461 *
11462 * RETURN : None
11463 *==========================================================================*/
11464void QCamera3HardwareInterface::getExpectedFrameDuration(
11465 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11466 if (nullptr == frameDuration) {
11467 return;
11468 }
11469
11470 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11471 find_camera_metadata_ro_entry(request,
11472 ANDROID_SENSOR_EXPOSURE_TIME,
11473 &e);
11474 if (e.count > 0) {
11475 *frameDuration = e.data.i64[0];
11476 }
11477 find_camera_metadata_ro_entry(request,
11478 ANDROID_SENSOR_FRAME_DURATION,
11479 &e);
11480 if (e.count > 0) {
11481 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11482 }
11483}
11484
11485/*===========================================================================
11486 * FUNCTION : calculateMaxExpectedDuration
11487 *
11488 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11489 * current camera settings.
11490 *
11491 * PARAMETERS :
11492 * @request : request settings
11493 *
11494 * RETURN : Expected frame duration in nanoseconds.
11495 *==========================================================================*/
11496nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11497 const camera_metadata_t *request) {
11498 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11499 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11500 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11501 if (e.count == 0) {
11502 return maxExpectedDuration;
11503 }
11504
11505 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11506 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11507 }
11508
11509 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11510 return maxExpectedDuration;
11511 }
11512
11513 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11514 if (e.count == 0) {
11515 return maxExpectedDuration;
11516 }
11517
11518 switch (e.data.u8[0]) {
11519 case ANDROID_CONTROL_AE_MODE_OFF:
11520 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11521 break;
11522 default:
11523 find_camera_metadata_ro_entry(request,
11524 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11525 &e);
11526 if (e.count > 1) {
11527            maxExpectedDuration = 1e9 / e.data.i32[0]; // fps range entries are int32; use min fps
11528 }
11529 break;
11530 }
11531
11532 return maxExpectedDuration;
11533}
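// Illustration of the paths above: if ANDROID_CONTROL_MODE is absent, the
// default kDefaultExpectedDuration is returned; with control mode or AE mode
// OFF, the manual exposure/frame-duration tags decide via
// getExpectedFrameDuration; when control mode is AUTO and AE is on, an AE
// target fps range such as [15, 30] yields 1e9 / 15 ns, i.e. roughly 66.7 ms
// as the longest expected frame time.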
11534
11535/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011536 * FUNCTION : setFrameParameters
11537 *
11538 * DESCRIPTION: set parameters per frame as requested in the metadata from
11539 * framework
11540 *
11541 * PARAMETERS :
11542 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011543 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011544 * @blob_request: Whether this request is a blob request or not
11545 *
11546 * RETURN : success: NO_ERROR
11547 * failure:
11548 *==========================================================================*/
11549int QCamera3HardwareInterface::setFrameParameters(
11550 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011551 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011552 int blob_request,
11553 uint32_t snapshotStreamId)
11554{
11555 /*translate from camera_metadata_t type to parm_type_t*/
11556 int rc = 0;
11557 int32_t hal_version = CAM_HAL_V3;
11558
11559 clear_metadata_buffer(mParameters);
11560 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11561 LOGE("Failed to set hal version in the parameters");
11562 return BAD_VALUE;
11563 }
11564
11565 /*we need to update the frame number in the parameters*/
11566 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11567 request->frame_number)) {
11568 LOGE("Failed to set the frame number in the parameters");
11569 return BAD_VALUE;
11570 }
11571
11572 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011574 LOGE("Failed to set stream type mask in the parameters");
11575 return BAD_VALUE;
11576 }
11577
11578 if (mUpdateDebugLevel) {
11579 uint32_t dummyDebugLevel = 0;
11580            /* The value of dummyDebugLevel is irrelevant; setting
11581             * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL makes the backend re-read the debug property */
11582 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11583 dummyDebugLevel)) {
11584 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11585 return BAD_VALUE;
11586 }
11587 mUpdateDebugLevel = false;
11588 }
11589
11590 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011591 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011592 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11593 if (blob_request)
11594 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11595 }
11596
11597 return rc;
11598}
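// Minimal usage sketch (hypothetical values; the real call site is typically
// processCaptureRequest, and the cam_stream_ID_t layout is defined in the
// mm-camera interface headers):
//
//   cam_stream_ID_t streamsArray;                    // filled with the IDs of
//   memset(&streamsArray, 0, sizeof(streamsArray));  // the streams in this request
//   int rc = setFrameParameters(request, streamsArray, blob_request,
//           snapshotStreamId);
//   if (rc == NO_ERROR) {
//       // mParameters now holds the per-frame settings and is ready to be
//       // pushed to the backend via the usual set_parms path.
//   }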
11599
11600/*===========================================================================
11601 * FUNCTION : setReprocParameters
11602 *
11603 * DESCRIPTION: Translate framework metadata into the HAL metadata structure and
11604 * return it.
11605 *
11606 * PARAMETERS :
11607 * @request : request that needs to be serviced
11608 *
11609 * RETURN : success: NO_ERROR
11610 * failure:
11611 *==========================================================================*/
11612int32_t QCamera3HardwareInterface::setReprocParameters(
11613 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11614 uint32_t snapshotStreamId)
11615{
11616 /*translate from camera_metadata_t type to parm_type_t*/
11617 int rc = 0;
11618
11619 if (NULL == request->settings){
11620 LOGE("Reprocess settings cannot be NULL");
11621 return BAD_VALUE;
11622 }
11623
11624 if (NULL == reprocParam) {
11625 LOGE("Invalid reprocessing metadata buffer");
11626 return BAD_VALUE;
11627 }
11628 clear_metadata_buffer(reprocParam);
11629
11630 /*we need to update the frame number in the parameters*/
11631 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11632 request->frame_number)) {
11633 LOGE("Failed to set the frame number in the parameters");
11634 return BAD_VALUE;
11635 }
11636
11637 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11638 if (rc < 0) {
11639 LOGE("Failed to translate reproc request");
11640 return rc;
11641 }
11642
11643 CameraMetadata frame_settings;
11644 frame_settings = request->settings;
11645 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11646 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11647 int32_t *crop_count =
11648 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11649 int32_t *crop_data =
11650 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11651 int32_t *roi_map =
11652 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11653 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11654 cam_crop_data_t crop_meta;
11655 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11656 crop_meta.num_of_streams = 1;
11657 crop_meta.crop_info[0].crop.left = crop_data[0];
11658 crop_meta.crop_info[0].crop.top = crop_data[1];
11659 crop_meta.crop_info[0].crop.width = crop_data[2];
11660 crop_meta.crop_info[0].crop.height = crop_data[3];
11661
11662 crop_meta.crop_info[0].roi_map.left =
11663 roi_map[0];
11664 crop_meta.crop_info[0].roi_map.top =
11665 roi_map[1];
11666 crop_meta.crop_info[0].roi_map.width =
11667 roi_map[2];
11668 crop_meta.crop_info[0].roi_map.height =
11669 roi_map[3];
11670
11671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11672 rc = BAD_VALUE;
11673 }
11674 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11675 request->input_buffer->stream,
11676 crop_meta.crop_info[0].crop.left,
11677 crop_meta.crop_info[0].crop.top,
11678 crop_meta.crop_info[0].crop.width,
11679 crop_meta.crop_info[0].crop.height);
11680 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11681 request->input_buffer->stream,
11682 crop_meta.crop_info[0].roi_map.left,
11683 crop_meta.crop_info[0].roi_map.top,
11684 crop_meta.crop_info[0].roi_map.width,
11685 crop_meta.crop_info[0].roi_map.height);
11686 } else {
11687 LOGE("Invalid reprocess crop count %d!", *crop_count);
11688 }
11689 } else {
11690 LOGE("No crop data from matching output stream");
11691 }
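    // For reference, the crop vendor tags consumed above carry int32 quadruples
    // of [left, top, width, height]; e.g. (hypothetical values) crop_count = 1,
    // crop_data = {0, 0, 4000, 3000} and roi_map = {0, 0, 4000, 3000} describe a
    // full-frame crop of a 4000x3000 output stream.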
11692
11693    /* These settings are not needed for regular requests, so handle them specially for
11694       reprocess requests; they carry information needed for EXIF tags */
11695 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11696 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11697 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11698 if (NAME_NOT_FOUND != val) {
11699 uint32_t flashMode = (uint32_t)val;
11700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11701 rc = BAD_VALUE;
11702 }
11703 } else {
11704 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11705 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11706 }
11707 } else {
11708 LOGH("No flash mode in reprocess settings");
11709 }
11710
11711 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11712 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11713 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11714 rc = BAD_VALUE;
11715 }
11716 } else {
11717 LOGH("No flash state in reprocess settings");
11718 }
11719
11720 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11721 uint8_t *reprocessFlags =
11722 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11724 *reprocessFlags)) {
11725 rc = BAD_VALUE;
11726 }
11727 }
11728
Thierry Strudel54dc9782017-02-15 12:12:10 -080011729 // Add exif debug data to internal metadata
11730 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11731 mm_jpeg_debug_exif_params_t *debug_params =
11732 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11733 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11734 // AE
11735 if (debug_params->ae_debug_params_valid == TRUE) {
11736 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11737 debug_params->ae_debug_params);
11738 }
11739 // AWB
11740 if (debug_params->awb_debug_params_valid == TRUE) {
11741 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11742 debug_params->awb_debug_params);
11743 }
11744 // AF
11745 if (debug_params->af_debug_params_valid == TRUE) {
11746 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11747 debug_params->af_debug_params);
11748 }
11749 // ASD
11750 if (debug_params->asd_debug_params_valid == TRUE) {
11751 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11752 debug_params->asd_debug_params);
11753 }
11754 // Stats
11755 if (debug_params->stats_debug_params_valid == TRUE) {
11756 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11757 debug_params->stats_debug_params);
11758 }
11759 // BE Stats
11760 if (debug_params->bestats_debug_params_valid == TRUE) {
11761 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11762 debug_params->bestats_debug_params);
11763 }
11764 // BHIST
11765 if (debug_params->bhist_debug_params_valid == TRUE) {
11766 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11767 debug_params->bhist_debug_params);
11768 }
11769 // 3A Tuning
11770 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11771 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11772 debug_params->q3a_tuning_debug_params);
11773 }
11774 }
11775
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011776 // Add metadata which reprocess needs
11777 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11778 cam_reprocess_info_t *repro_info =
11779 (cam_reprocess_info_t *)frame_settings.find
11780 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011781 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011782 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011783 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011784 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011785 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011786 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011787 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011788 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011789 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011790 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011791 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011792 repro_info->pipeline_flip);
11793 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11794 repro_info->af_roi);
11795 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11796 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011797        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11798           CAM_INTF_PARM_ROTATION metadata has already been added in
11799           translateToHalMetadata and the HAL needs to keep this new rotation
11800           metadata. Otherwise, the old rotation info saved in the vendor tag
11801           is used */
11802 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11803 CAM_INTF_PARM_ROTATION, reprocParam) {
11804 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11805 } else {
11806 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011807 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011808 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011809 }
11810
11811    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11812       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding;
11813       roi.width and roi.height give the final JPEG size.
11814       For now, the HAL only checks this for reprocess requests */
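    /* Example with hypothetical values: QCAMERA3_JPEG_ENCODE_CROP_ENABLE = 1,
       QCAMERA3_JPEG_ENCODE_CROP_RECT = {0, 0, 3000, 3000} and
       QCAMERA3_JPEG_ENCODE_CROP_ROI = {0, 0, 1080, 1080} crop a 3000x3000
       region from the CPP output and scale it so the final JPEG is 1080x1080 */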
11815 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11816 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11817 uint8_t *enable =
11818 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11819 if (*enable == TRUE) {
11820 int32_t *crop_data =
11821 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11822 cam_stream_crop_info_t crop_meta;
11823 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11824 crop_meta.stream_id = 0;
11825 crop_meta.crop.left = crop_data[0];
11826 crop_meta.crop.top = crop_data[1];
11827 crop_meta.crop.width = crop_data[2];
11828 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011829 // The JPEG crop roi should match cpp output size
11830 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11831 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11832 crop_meta.roi_map.left = 0;
11833 crop_meta.roi_map.top = 0;
11834 crop_meta.roi_map.width = cpp_crop->crop.width;
11835 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011836 }
11837 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11838 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011839 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011840 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011841 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11842 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011843 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011844 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11845
11846 // Add JPEG scale information
11847 cam_dimension_t scale_dim;
11848 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11849 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11850 int32_t *roi =
11851 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11852 scale_dim.width = roi[2];
11853 scale_dim.height = roi[3];
11854 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11855 scale_dim);
11856 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11857 scale_dim.width, scale_dim.height, mCameraId);
11858 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011859 }
11860 }
11861
11862 return rc;
11863}
11864
11865/*===========================================================================
11866 * FUNCTION : saveRequestSettings
11867 *
11868 * DESCRIPTION: Add any settings that might have changed to the request settings
11869 * and save the settings to be applied on the frame
11870 *
11871 * PARAMETERS :
11872 * @jpegMetadata : the extracted and/or modified jpeg metadata
11873 * @request : request with initial settings
11874 *
11875 * RETURN :
11876 * camera_metadata_t* : pointer to the saved request settings
11877 *==========================================================================*/
11878camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11879 const CameraMetadata &jpegMetadata,
11880 camera3_capture_request_t *request)
11881{
11882 camera_metadata_t *resultMetadata;
11883 CameraMetadata camMetadata;
11884 camMetadata = request->settings;
11885
11886 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11887 int32_t thumbnail_size[2];
11888 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11889 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11890 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11891 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11892 }
11893
11894 if (request->input_buffer != NULL) {
11895 uint8_t reprocessFlags = 1;
11896 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11897 (uint8_t*)&reprocessFlags,
11898 sizeof(reprocessFlags));
11899 }
11900
11901 resultMetadata = camMetadata.release();
11902 return resultMetadata;
11903}
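// Usage note: release() detaches the camera_metadata_t from the local
// CameraMetadata object, so ownership passes to the caller, which is expected
// to free the buffer with free_camera_metadata() once the saved settings are
// no longer needed.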
11904
11905/*===========================================================================
11906 * FUNCTION : setHalFpsRange
11907 *
11908 * DESCRIPTION: set FPS range parameter
11909 *
11910 *
11911 * PARAMETERS :
11912 * @settings : Metadata from framework
11913 * @hal_metadata: Metadata buffer
11914 *
11915 *
11916 * RETURN : success: NO_ERROR
11917 * failure:
11918 *==========================================================================*/
11919int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11920 metadata_buffer_t *hal_metadata)
11921{
11922 int32_t rc = NO_ERROR;
11923 cam_fps_range_t fps_range;
11924 fps_range.min_fps = (float)
11925 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11926 fps_range.max_fps = (float)
11927 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11928 fps_range.video_min_fps = fps_range.min_fps;
11929 fps_range.video_max_fps = fps_range.max_fps;
11930
11931 LOGD("aeTargetFpsRange fps: [%f %f]",
11932 fps_range.min_fps, fps_range.max_fps);
11933 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11934 * follows:
11935 * ---------------------------------------------------------------|
11936 * Video stream is absent in configure_streams |
11937     * (Camcorder preview before the first video record) |
11938 * ---------------------------------------------------------------|
11939 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11940 * | | | vid_min/max_fps|
11941 * ---------------------------------------------------------------|
11942 * NO | [ 30, 240] | 240 | [240, 240] |
11943 * |-------------|-------------|----------------|
11944 * | [240, 240] | 240 | [240, 240] |
11945 * ---------------------------------------------------------------|
11946 * Video stream is present in configure_streams |
11947 * ---------------------------------------------------------------|
11948 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11949 * | | | vid_min/max_fps|
11950 * ---------------------------------------------------------------|
11951 * NO | [ 30, 240] | 240 | [240, 240] |
11952 * (camcorder prev |-------------|-------------|----------------|
11953 * after video rec | [240, 240] | 240 | [240, 240] |
11954 * is stopped) | | | |
11955 * ---------------------------------------------------------------|
11956 * YES | [ 30, 240] | 240 | [240, 240] |
11957 * |-------------|-------------|----------------|
11958 * | [240, 240] | 240 | [240, 240] |
11959 * ---------------------------------------------------------------|
11960 * When Video stream is absent in configure_streams,
11961 * preview fps = sensor_fps / batchsize
11962 * Eg: for 240fps at batchSize 4, preview = 60fps
11963 * for 120fps at batchSize 4, preview = 30fps
11964 *
11965 * When video stream is present in configure_streams, preview fps is as per
11966 * the ratio of preview buffers to video buffers requested in process
11967 * capture request
11968 */
11969 mBatchSize = 0;
11970 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11971 fps_range.min_fps = fps_range.video_max_fps;
11972 fps_range.video_min_fps = fps_range.video_max_fps;
11973 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11974 fps_range.max_fps);
11975 if (NAME_NOT_FOUND != val) {
11976 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11977 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11978 return BAD_VALUE;
11979 }
11980
11981 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11982 /* If batchmode is currently in progress and the fps changes,
11983 * set the flag to restart the sensor */
11984 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11985 (mHFRVideoFps != fps_range.max_fps)) {
11986 mNeedSensorRestart = true;
11987 }
11988 mHFRVideoFps = fps_range.max_fps;
11989 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11990 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11991 mBatchSize = MAX_HFR_BATCH_SIZE;
11992 }
11993 }
11994 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11995
11996 }
11997 } else {
11998 /* HFR mode is session param in backend/ISP. This should be reset when
11999 * in non-HFR mode */
12000 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12002 return BAD_VALUE;
12003 }
12004 }
12005 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12006 return BAD_VALUE;
12007 }
12008 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12009 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12010 return rc;
12011}
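// Worked example for the constrained high-speed path above (assuming
// PREVIEW_FPS_FOR_HFR is 30 and MAX_HFR_BATCH_SIZE is 4, per their definitions
// elsewhere in the HAL): an aeTargetFpsRange of [120, 120] selects the
// corresponding 120 fps HFR mode, forces min/video fps to 120, and gives
// mBatchSize = 120 / 30 = 4; a range of [240, 240] computes 240 / 30 = 8,
// which is clamped to 4, matching the "240fps at batchSize 4, preview = 60fps"
// case in the table above.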
12012
12013/*===========================================================================
12014 * FUNCTION : translateToHalMetadata
12015 *
12016 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12017 *
12018 *
12019 * PARAMETERS :
12020 * @request : request sent from framework
12021 *
12022 *
12023 * RETURN : success: NO_ERROR
12024 * failure:
12025 *==========================================================================*/
12026int QCamera3HardwareInterface::translateToHalMetadata
12027 (const camera3_capture_request_t *request,
12028 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012029 uint32_t snapshotStreamId) {
12030 if (request == nullptr || hal_metadata == nullptr) {
12031 return BAD_VALUE;
12032 }
12033
12034 int64_t minFrameDuration = getMinFrameDuration(request);
12035
12036 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12037 minFrameDuration);
12038}
12039
12040int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12041 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12042 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12043
Thierry Strudel3d639192016-09-09 11:52:26 -070012044 int rc = 0;
12045 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012046 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012047
12048 /* Do not change the order of the following list unless you know what you are
12049 * doing.
12050     * The order is laid out so that parameters at the front of the table can be
12051     * used to override parameters handled later in the table. Examples are:
12052     * 1. META_MODE should precede AEC/AWB/AF MODE
12053     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12054     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12055     * 4. Any mode should precede its corresponding settings
12056 */
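    /* For example, ANDROID_CONTROL_AE_MODE is parsed here before
     * ANDROID_FLASH_MODE, so the flash handling further down can see that AE
     * already controls the flash and ignore android.flash.mode. */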
12057 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12058 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12059 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12060 rc = BAD_VALUE;
12061 }
12062 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12063 if (rc != NO_ERROR) {
12064 LOGE("extractSceneMode failed");
12065 }
12066 }
12067
12068 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12069 uint8_t fwk_aeMode =
12070 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12071 uint8_t aeMode;
12072 int32_t redeye;
12073
12074 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12075 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012076 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12077 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012078 } else {
12079 aeMode = CAM_AE_MODE_ON;
12080 }
12081 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12082 redeye = 1;
12083 } else {
12084 redeye = 0;
12085 }
12086
12087 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12088 fwk_aeMode);
12089 if (NAME_NOT_FOUND != val) {
12090 int32_t flashMode = (int32_t)val;
12091 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12092 }
12093
12094 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12096 rc = BAD_VALUE;
12097 }
12098 }
12099
12100 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12101 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12102 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12103 fwk_whiteLevel);
12104 if (NAME_NOT_FOUND != val) {
12105 uint8_t whiteLevel = (uint8_t)val;
12106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12107 rc = BAD_VALUE;
12108 }
12109 }
12110 }
12111
12112 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12113 uint8_t fwk_cacMode =
12114 frame_settings.find(
12115 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12116 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12117 fwk_cacMode);
12118 if (NAME_NOT_FOUND != val) {
12119 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12120 bool entryAvailable = FALSE;
12121 // Check whether Frameworks set CAC mode is supported in device or not
12122 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12123 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12124 entryAvailable = TRUE;
12125 break;
12126 }
12127 }
12128 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12129        // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
12130        // only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast in the ISP
12131        // no HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
12132 if (entryAvailable == FALSE) {
12133 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12134 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12135 } else {
12136 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12137                    // High is not supported, so set FAST since the spec says the underlying
12138                    // device implementation can be the same for both modes.
12139 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12140 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12141 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12142 // in order to avoid the fps drop due to high quality
12143 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12144 } else {
12145 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12146 }
12147 }
12148 }
12149 LOGD("Final cacMode is %d", cacMode);
12150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12151 rc = BAD_VALUE;
12152 }
12153 } else {
12154 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12155 }
12156 }
12157
Thierry Strudel2896d122017-02-23 19:18:03 -080012158 char af_value[PROPERTY_VALUE_MAX];
12159 property_get("persist.camera.af.infinity", af_value, "0");
12160
Jason Lee84ae9972017-02-24 13:24:24 -080012161 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080012162 if (atoi(af_value) == 0) {
12163 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012164 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012165 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12166 fwk_focusMode);
12167 if (NAME_NOT_FOUND != val) {
12168 uint8_t focusMode = (uint8_t)val;
12169 LOGD("set focus mode %d", focusMode);
12170 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12171 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12172 rc = BAD_VALUE;
12173 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012174 }
12175 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012176 } else {
12177 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12178 LOGE("Focus forced to infinity %d", focusMode);
12179 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12180 rc = BAD_VALUE;
12181 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012182 }
12183
Jason Lee84ae9972017-02-24 13:24:24 -080012184 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12185 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012186 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12188 focalDistance)) {
12189 rc = BAD_VALUE;
12190 }
12191 }
12192
12193 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12194 uint8_t fwk_antibandingMode =
12195 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12196 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12197 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12198 if (NAME_NOT_FOUND != val) {
12199 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012200 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12201 if (m60HzZone) {
12202 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12203 } else {
12204 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12205 }
12206 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012207 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12208 hal_antibandingMode)) {
12209 rc = BAD_VALUE;
12210 }
12211 }
12212 }
12213
12214 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12215 int32_t expCompensation = frame_settings.find(
12216 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12217 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12218 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12219 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12220 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012221 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12223 expCompensation)) {
12224 rc = BAD_VALUE;
12225 }
12226 }
12227
12228 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12229 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12231 rc = BAD_VALUE;
12232 }
12233 }
12234 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12235 rc = setHalFpsRange(frame_settings, hal_metadata);
12236 if (rc != NO_ERROR) {
12237 LOGE("setHalFpsRange failed");
12238 }
12239 }
12240
12241 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12242 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12243 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12244 rc = BAD_VALUE;
12245 }
12246 }
12247
12248 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12249 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12250 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12251 fwk_effectMode);
12252 if (NAME_NOT_FOUND != val) {
12253 uint8_t effectMode = (uint8_t)val;
12254 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12255 rc = BAD_VALUE;
12256 }
12257 }
12258 }
12259
12260 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12261 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12263 colorCorrectMode)) {
12264 rc = BAD_VALUE;
12265 }
12266 }
12267
12268 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12269 cam_color_correct_gains_t colorCorrectGains;
12270 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12271 colorCorrectGains.gains[i] =
12272 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12273 }
12274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12275 colorCorrectGains)) {
12276 rc = BAD_VALUE;
12277 }
12278 }
12279
12280 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12281 cam_color_correct_matrix_t colorCorrectTransform;
12282 cam_rational_type_t transform_elem;
12283 size_t num = 0;
12284 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12285 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12286 transform_elem.numerator =
12287 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12288 transform_elem.denominator =
12289 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12290 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12291 num++;
12292 }
12293 }
12294 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12295 colorCorrectTransform)) {
12296 rc = BAD_VALUE;
12297 }
12298 }
12299
12300 cam_trigger_t aecTrigger;
12301 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12302 aecTrigger.trigger_id = -1;
12303 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12304 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12305 aecTrigger.trigger =
12306 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12307 aecTrigger.trigger_id =
12308 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12309 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12310 aecTrigger)) {
12311 rc = BAD_VALUE;
12312 }
12313 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12314 aecTrigger.trigger, aecTrigger.trigger_id);
12315 }
12316
12317 /*af_trigger must come with a trigger id*/
12318 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12319 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12320 cam_trigger_t af_trigger;
12321 af_trigger.trigger =
12322 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12323 af_trigger.trigger_id =
12324 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12326 rc = BAD_VALUE;
12327 }
12328 LOGD("AfTrigger: %d AfTriggerID: %d",
12329 af_trigger.trigger, af_trigger.trigger_id);
12330 }
12331
12332 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12333 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12335 rc = BAD_VALUE;
12336 }
12337 }
12338 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12339 cam_edge_application_t edge_application;
12340 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012341
Thierry Strudel3d639192016-09-09 11:52:26 -070012342 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12343 edge_application.sharpness = 0;
12344 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012345 edge_application.sharpness =
12346 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12347 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12348 int32_t sharpness =
12349 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12350 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12351 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12352 LOGD("Setting edge mode sharpness %d", sharpness);
12353 edge_application.sharpness = sharpness;
12354 }
12355 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012356 }
12357 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12358 rc = BAD_VALUE;
12359 }
12360 }
12361
12362 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12363 int32_t respectFlashMode = 1;
12364 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12365 uint8_t fwk_aeMode =
12366 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012367 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12368 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12369 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012370 respectFlashMode = 0;
12371 LOGH("AE Mode controls flash, ignore android.flash.mode");
12372 }
12373 }
12374 if (respectFlashMode) {
12375 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12376 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12377 LOGH("flash mode after mapping %d", val);
12378 // To check: CAM_INTF_META_FLASH_MODE usage
12379 if (NAME_NOT_FOUND != val) {
12380 uint8_t flashMode = (uint8_t)val;
12381 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12382 rc = BAD_VALUE;
12383 }
12384 }
12385 }
12386 }
12387
12388 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12389 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12390 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12391 rc = BAD_VALUE;
12392 }
12393 }
12394
12395 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12396 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12397 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12398 flashFiringTime)) {
12399 rc = BAD_VALUE;
12400 }
12401 }
12402
12403 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12404 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12405 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12406 hotPixelMode)) {
12407 rc = BAD_VALUE;
12408 }
12409 }
12410
12411 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12412 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12413 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12414 lensAperture)) {
12415 rc = BAD_VALUE;
12416 }
12417 }
12418
12419 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12420 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12421 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12422 filterDensity)) {
12423 rc = BAD_VALUE;
12424 }
12425 }
12426
12427 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12428 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12430 focalLength)) {
12431 rc = BAD_VALUE;
12432 }
12433 }
12434
12435 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12436 uint8_t optStabMode =
12437 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12438 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12439 optStabMode)) {
12440 rc = BAD_VALUE;
12441 }
12442 }
12443
12444 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12445 uint8_t videoStabMode =
12446 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12447 LOGD("videoStabMode from APP = %d", videoStabMode);
12448 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12449 videoStabMode)) {
12450 rc = BAD_VALUE;
12451 }
12452 }
12453
12454
12455 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12456 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12457 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12458 noiseRedMode)) {
12459 rc = BAD_VALUE;
12460 }
12461 }
12462
12463 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12464 float reprocessEffectiveExposureFactor =
12465 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12466 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12467 reprocessEffectiveExposureFactor)) {
12468 rc = BAD_VALUE;
12469 }
12470 }
12471
12472 cam_crop_region_t scalerCropRegion;
12473 bool scalerCropSet = false;
12474 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12475 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12476 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12477 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12478 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12479
12480 // Map coordinate system from active array to sensor output.
12481 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12482 scalerCropRegion.width, scalerCropRegion.height);
12483
12484 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12485 scalerCropRegion)) {
12486 rc = BAD_VALUE;
12487 }
12488 scalerCropSet = true;
12489 }
12490
12491 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12492 int64_t sensorExpTime =
12493 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12494 LOGD("setting sensorExpTime %lld", sensorExpTime);
12495 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12496 sensorExpTime)) {
12497 rc = BAD_VALUE;
12498 }
12499 }
12500
12501 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12502 int64_t sensorFrameDuration =
12503 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012504 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12505 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12506 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12507 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12508 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12509 sensorFrameDuration)) {
12510 rc = BAD_VALUE;
12511 }
12512 }
12513
12514 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12515 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12516 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12517 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12518 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12519 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12520 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12521 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12522 sensorSensitivity)) {
12523 rc = BAD_VALUE;
12524 }
12525 }
12526
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012527#ifndef USE_HAL_3_3
12528 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12529 int32_t ispSensitivity =
12530 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12531 if (ispSensitivity <
12532 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12533 ispSensitivity =
12534 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12535 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12536 }
12537 if (ispSensitivity >
12538 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12539 ispSensitivity =
12540 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12541 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12542 }
12543 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12544 ispSensitivity)) {
12545 rc = BAD_VALUE;
12546 }
12547 }
12548#endif
12549
Thierry Strudel3d639192016-09-09 11:52:26 -070012550 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12551 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12552 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
12557 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12558 uint8_t fwk_facedetectMode =
12559 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12560
12561 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12562 fwk_facedetectMode);
12563
12564 if (NAME_NOT_FOUND != val) {
12565 uint8_t facedetectMode = (uint8_t)val;
12566 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12567 facedetectMode)) {
12568 rc = BAD_VALUE;
12569 }
12570 }
12571 }
12572
Thierry Strudel54dc9782017-02-15 12:12:10 -080012573 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012574 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012575 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012576 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12577 histogramMode)) {
12578 rc = BAD_VALUE;
12579 }
12580 }
12581
12582 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12583 uint8_t sharpnessMapMode =
12584 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12586 sharpnessMapMode)) {
12587 rc = BAD_VALUE;
12588 }
12589 }
12590
12591 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12592 uint8_t tonemapMode =
12593 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12595 rc = BAD_VALUE;
12596 }
12597 }
12598 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12599    /* All tonemap channels will have the same number of points */
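    /* Example: a two-point identity curve per channel arrives as the
       interleaved (Pin, Pout) floats {0.0, 0.0, 1.0, 1.0}, so count == 4 and
       tonemap_points_cnt == 2 for each of the G, B and R entries */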
12600 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12601 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12602 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12603 cam_rgb_tonemap_curves tonemapCurves;
12604 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12605 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12606 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12607 tonemapCurves.tonemap_points_cnt,
12608 CAM_MAX_TONEMAP_CURVE_SIZE);
12609 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12610 }
12611
12612 /* ch0 = G*/
12613 size_t point = 0;
12614 cam_tonemap_curve_t tonemapCurveGreen;
12615 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12616 for (size_t j = 0; j < 2; j++) {
12617 tonemapCurveGreen.tonemap_points[i][j] =
12618 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12619 point++;
12620 }
12621 }
12622 tonemapCurves.curves[0] = tonemapCurveGreen;
12623
12624 /* ch 1 = B */
12625 point = 0;
12626 cam_tonemap_curve_t tonemapCurveBlue;
12627 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12628 for (size_t j = 0; j < 2; j++) {
12629 tonemapCurveBlue.tonemap_points[i][j] =
12630 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12631 point++;
12632 }
12633 }
12634 tonemapCurves.curves[1] = tonemapCurveBlue;
12635
12636 /* ch 2 = R */
12637 point = 0;
12638 cam_tonemap_curve_t tonemapCurveRed;
12639 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12640 for (size_t j = 0; j < 2; j++) {
12641 tonemapCurveRed.tonemap_points[i][j] =
12642 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12643 point++;
12644 }
12645 }
12646 tonemapCurves.curves[2] = tonemapCurveRed;
12647
12648 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12649 tonemapCurves)) {
12650 rc = BAD_VALUE;
12651 }
12652 }
12653
12654 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12655 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12657 captureIntent)) {
12658 rc = BAD_VALUE;
12659 }
12660 }
12661
12662 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12663 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12665 blackLevelLock)) {
12666 rc = BAD_VALUE;
12667 }
12668 }
12669
12670 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12671 uint8_t lensShadingMapMode =
12672 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12673 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12674 lensShadingMapMode)) {
12675 rc = BAD_VALUE;
12676 }
12677 }
12678
12679 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12680 cam_area_t roi;
12681 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012682 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012683
12684 // Map coordinate system from active array to sensor output.
12685 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12686 roi.rect.height);
12687
12688 if (scalerCropSet) {
12689 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12690 }
12691 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12692 rc = BAD_VALUE;
12693 }
12694 }
12695
12696 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12697 cam_area_t roi;
12698 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012699 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012700
12701 // Map coordinate system from active array to sensor output.
12702 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12703 roi.rect.height);
12704
12705 if (scalerCropSet) {
12706 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12707 }
12708 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12709 rc = BAD_VALUE;
12710 }
12711 }
12712
12713 // CDS for non-HFR non-video mode
12714 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12715 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12716 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12717 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12718 LOGE("Invalid CDS mode %d!", *fwk_cds);
12719 } else {
12720 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12721 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12722 rc = BAD_VALUE;
12723 }
12724 }
12725 }
12726
Thierry Strudel04e026f2016-10-10 11:27:36 -070012727 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012728 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012729 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012730 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12731 }
12732 if (m_bVideoHdrEnabled)
12733 vhdr = CAM_VIDEO_HDR_MODE_ON;
12734
Thierry Strudel54dc9782017-02-15 12:12:10 -080012735 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12736
12737 if(vhdr != curr_hdr_state)
12738 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12739
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012740 rc = setVideoHdrMode(mParameters, vhdr);
12741 if (rc != NO_ERROR) {
12742 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012743 }
12744
12745 //IR
12746 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12747 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12748 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012749 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12750 uint8_t isIRon = 0;
12751
12752        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012753 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12754 LOGE("Invalid IR mode %d!", fwk_ir);
12755 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012756 if(isIRon != curr_ir_state )
12757 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12758
Thierry Strudel04e026f2016-10-10 11:27:36 -070012759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12760 CAM_INTF_META_IR_MODE, fwk_ir)) {
12761 rc = BAD_VALUE;
12762 }
12763 }
12764 }
12765
Thierry Strudel54dc9782017-02-15 12:12:10 -080012766 //Binning Correction Mode
12767 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12768 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12769 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12770 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12771 || (0 > fwk_binning_correction)) {
12772 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12773 } else {
12774 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12775 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12776 rc = BAD_VALUE;
12777 }
12778 }
12779 }
12780
Thierry Strudel269c81a2016-10-12 12:13:59 -070012781 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12782 float aec_speed;
12783 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12784 LOGD("AEC Speed :%f", aec_speed);
12785 if ( aec_speed < 0 ) {
12786            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12787 } else {
12788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12789 aec_speed)) {
12790 rc = BAD_VALUE;
12791 }
12792 }
12793 }
12794
12795 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12796 float awb_speed;
12797 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12798 LOGD("AWB Speed :%f", awb_speed);
12799 if ( awb_speed < 0 ) {
12800            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12801 } else {
12802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12803 awb_speed)) {
12804 rc = BAD_VALUE;
12805 }
12806 }
12807 }
12808
Thierry Strudel3d639192016-09-09 11:52:26 -070012809 // TNR
12810 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12811 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12812 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012813 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012814 cam_denoise_param_t tnr;
12815 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12816 tnr.process_plates =
12817 (cam_denoise_process_type_t)frame_settings.find(
12818 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12819 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012820
12821 if(b_TnrRequested != curr_tnr_state)
12822 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12823
Thierry Strudel3d639192016-09-09 11:52:26 -070012824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12825 rc = BAD_VALUE;
12826 }
12827 }
12828
Thierry Strudel54dc9782017-02-15 12:12:10 -080012829 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012830 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012831 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12833 *exposure_metering_mode)) {
12834 rc = BAD_VALUE;
12835 }
12836 }
12837
Thierry Strudel3d639192016-09-09 11:52:26 -070012838 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12839 int32_t fwk_testPatternMode =
12840 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12841 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12842 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12843
12844 if (NAME_NOT_FOUND != testPatternMode) {
12845 cam_test_pattern_data_t testPatternData;
12846 memset(&testPatternData, 0, sizeof(testPatternData));
12847 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12848 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12849 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12850 int32_t *fwk_testPatternData =
12851 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12852 testPatternData.r = fwk_testPatternData[0];
12853 testPatternData.b = fwk_testPatternData[3];
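                /* The framework supplies the solid-colour pattern as four raw channel values;
                 * indices 1 and 2 carry the two green channels, and the switch below maps them
                 * onto Gr/Gb according to the sensor's CFA layout. */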
12854 switch (gCamCapability[mCameraId]->color_arrangement) {
12855 case CAM_FILTER_ARRANGEMENT_RGGB:
12856 case CAM_FILTER_ARRANGEMENT_GRBG:
12857 testPatternData.gr = fwk_testPatternData[1];
12858 testPatternData.gb = fwk_testPatternData[2];
12859 break;
12860 case CAM_FILTER_ARRANGEMENT_GBRG:
12861 case CAM_FILTER_ARRANGEMENT_BGGR:
12862 testPatternData.gr = fwk_testPatternData[2];
12863 testPatternData.gb = fwk_testPatternData[1];
12864 break;
12865 default:
12866 LOGE("color arrangement %d is not supported",
12867 gCamCapability[mCameraId]->color_arrangement);
12868 break;
12869 }
12870 }
12871 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12872 testPatternData)) {
12873 rc = BAD_VALUE;
12874 }
12875 } else {
12876 LOGE("Invalid framework sensor test pattern mode %d",
12877 fwk_testPatternMode);
12878 }
12879 }
12880
12881 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12882 size_t count = 0;
12883 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12884 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12885 gps_coords.data.d, gps_coords.count, count);
12886 if (gps_coords.count != count) {
12887 rc = BAD_VALUE;
12888 }
12889 }
12890
12891 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12892 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12893 size_t count = 0;
12894 const char *gps_methods_src = (const char *)
12895 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12896 memset(gps_methods, '\0', sizeof(gps_methods));
12897 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12898 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12899 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12900 if (GPS_PROCESSING_METHOD_SIZE != count) {
12901 rc = BAD_VALUE;
12902 }
12903 }
12904
12905 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12906 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12907 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12908 gps_timestamp)) {
12909 rc = BAD_VALUE;
12910 }
12911 }
12912
12913 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12914 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12915 cam_rotation_info_t rotation_info;
12916 if (orientation == 0) {
12917 rotation_info.rotation = ROTATE_0;
12918 } else if (orientation == 90) {
12919 rotation_info.rotation = ROTATE_90;
12920 } else if (orientation == 180) {
12921 rotation_info.rotation = ROTATE_180;
12922 } else if (orientation == 270) {
12923 rotation_info.rotation = ROTATE_270;
12924 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012925 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012926 rotation_info.streamId = snapshotStreamId;
12927 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12929 rc = BAD_VALUE;
12930 }
12931 }
12932
12933 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12934 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12936 rc = BAD_VALUE;
12937 }
12938 }
12939
12940 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12941 uint32_t thumb_quality = (uint32_t)
12942 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12943 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12944 thumb_quality)) {
12945 rc = BAD_VALUE;
12946 }
12947 }
12948
12949 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12950 cam_dimension_t dim;
12951 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12952 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12953 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12954 rc = BAD_VALUE;
12955 }
12956 }
12957
12958 // Internal metadata
12959 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12960 size_t count = 0;
12961 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12962 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12963 privatedata.data.i32, privatedata.count, count);
12964 if (privatedata.count != count) {
12965 rc = BAD_VALUE;
12966 }
12967 }
12968
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012969 // ISO/Exposure Priority
12970 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12971 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12972 cam_priority_mode_t mode =
12973 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12974 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12975 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12976 use_iso_exp_pty.previewOnly = FALSE;
12977 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12978 use_iso_exp_pty.value = *ptr;
12979
12980 if(CAM_ISO_PRIORITY == mode) {
12981 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12982 use_iso_exp_pty)) {
12983 rc = BAD_VALUE;
12984 }
12985 }
12986 else {
12987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12988 use_iso_exp_pty)) {
12989 rc = BAD_VALUE;
12990 }
12991 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012992
12993 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12994 rc = BAD_VALUE;
12995 }
12996 }
12997 } else {
12998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12999 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013000 }
13001 }
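    /* Note: ZSL mode is force-enabled above whenever ISO or exposure-time priority is
     * selected, and explicitly disabled otherwise. */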
13002
13003 // Saturation
13004 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13005 int32_t* use_saturation =
13006 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13008 rc = BAD_VALUE;
13009 }
13010 }
13011
Thierry Strudel3d639192016-09-09 11:52:26 -070013012 // EV step
13013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13014 gCamCapability[mCameraId]->exp_compensation_step)) {
13015 rc = BAD_VALUE;
13016 }
13017
13018 // CDS info
13019 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13020 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13021 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13022
13023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13024 CAM_INTF_META_CDS_DATA, *cdsData)) {
13025 rc = BAD_VALUE;
13026 }
13027 }
13028
Shuzhen Wang19463d72016-03-08 11:09:52 -080013029 // Hybrid AE
13030 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13031 uint8_t *hybrid_ae = (uint8_t *)
13032 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13033
13034 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13035 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13036 rc = BAD_VALUE;
13037 }
13038 }
13039
Shuzhen Wang14415f52016-11-16 18:26:18 -080013040 // Histogram
13041 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13042 uint8_t histogramMode =
13043 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13045 histogramMode)) {
13046 rc = BAD_VALUE;
13047 }
13048 }
13049
13050 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13051 int32_t histogramBins =
13052 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13053 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13054 histogramBins)) {
13055 rc = BAD_VALUE;
13056 }
13057 }
13058
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013059 // Tracking AF
13060 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13061 uint8_t trackingAfTrigger =
13062 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13063 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13064 trackingAfTrigger)) {
13065 rc = BAD_VALUE;
13066 }
13067 }
13068
Thierry Strudel3d639192016-09-09 11:52:26 -070013069 return rc;
13070}
13071
13072/*===========================================================================
13073 * FUNCTION : captureResultCb
13074 *
13075 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13076 *
13077 * PARAMETERS :
13078 * @frame : frame information from mm-camera-interface
13079 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13080 * @userdata: userdata
13081 *
13082 * RETURN : NONE
13083 *==========================================================================*/
13084void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13085 camera3_stream_buffer_t *buffer,
13086 uint32_t frame_number, bool isInputBuffer, void *userdata)
13087{
13088 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13089 if (hw == NULL) {
13090 LOGE("Invalid hw %p", hw);
13091 return;
13092 }
13093
13094 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13095 return;
13096}
13097
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013098/*===========================================================================
13099 * FUNCTION : setBufferErrorStatus
13100 *
13101 * DESCRIPTION: Callback handler for channels to report any buffer errors
13102 *
13103 * PARAMETERS :
13104 * @ch : Channel on which buffer error is reported from
13105 * @frame_number : frame number on which buffer error is reported on
13106 * @buffer_status : buffer error status
13107 * @userdata: userdata
13108 *
13109 * RETURN : NONE
13110 *==========================================================================*/
13111void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13112 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13113{
13114 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13115 if (hw == NULL) {
13116 LOGE("Invalid hw %p", hw);
13117 return;
13118 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013119
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013120 hw->setBufferErrorStatus(ch, frame_number, err);
13121 return;
13122}
13123
13124void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13125 uint32_t frameNumber, camera3_buffer_status_t err)
13126{
13127 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13128 pthread_mutex_lock(&mMutex);
13129
13130 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13131 if (req.frame_number != frameNumber)
13132 continue;
13133 for (auto& k : req.mPendingBufferList) {
13134 if(k.stream->priv == ch) {
13135 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13136 }
13137 }
13138 }
13139
13140 pthread_mutex_unlock(&mMutex);
13141 return;
13142}
Thierry Strudel3d639192016-09-09 11:52:26 -070013143/*===========================================================================
13144 * FUNCTION : initialize
13145 *
13146 * DESCRIPTION: Pass framework callback pointers to HAL
13147 *
13148 * PARAMETERS :
13149 *
13150 *
13151 * RETURN : Success : 0
13152 * Failure: -ENODEV
13153 *==========================================================================*/
13154
13155int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13156 const camera3_callback_ops_t *callback_ops)
13157{
13158 LOGD("E");
13159 QCamera3HardwareInterface *hw =
13160 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13161 if (!hw) {
13162 LOGE("NULL camera device");
13163 return -ENODEV;
13164 }
13165
13166 int rc = hw->initialize(callback_ops);
13167 LOGD("X");
13168 return rc;
13169}
13170
13171/*===========================================================================
13172 * FUNCTION : configure_streams
13173 *
13174 * DESCRIPTION: Static entry point; forwards the stream configuration to the HAL instance
13175 *
13176 * PARAMETERS :
13177 *
13178 *
13179 * RETURN : Success: 0
13180 * Failure: -EINVAL (if stream configuration is invalid)
13181 * -ENODEV (fatal error)
13182 *==========================================================================*/
13183
13184int QCamera3HardwareInterface::configure_streams(
13185 const struct camera3_device *device,
13186 camera3_stream_configuration_t *stream_list)
13187{
13188 LOGD("E");
13189 QCamera3HardwareInterface *hw =
13190 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13191 if (!hw) {
13192 LOGE("NULL camera device");
13193 return -ENODEV;
13194 }
13195 int rc = hw->configureStreams(stream_list);
13196 LOGD("X");
13197 return rc;
13198}
13199
13200/*===========================================================================
13201 * FUNCTION : construct_default_request_settings
13202 *
13203 * DESCRIPTION: Configure a settings buffer to meet the required use case
13204 *
13205 * PARAMETERS :
13206 *
13207 *
13208 * RETURN : Success: Return valid metadata
13209 * Failure: Return NULL
13210 *==========================================================================*/
13211const camera_metadata_t* QCamera3HardwareInterface::
13212 construct_default_request_settings(const struct camera3_device *device,
13213 int type)
13214{
13215
13216 LOGD("E");
13217 camera_metadata_t* fwk_metadata = NULL;
13218 QCamera3HardwareInterface *hw =
13219 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13220 if (!hw) {
13221 LOGE("NULL camera device");
13222 return NULL;
13223 }
13224
13225 fwk_metadata = hw->translateCapabilityToMetadata(type);
13226
13227 LOGD("X");
13228 return fwk_metadata;
13229}
13230
13231/*===========================================================================
13232 * FUNCTION : process_capture_request
13233 *
13234 * DESCRIPTION: Static entry point; validates the device and forwards the capture request for orchestration
13235 *
13236 * PARAMETERS :
13237 *
13238 *
13239 * RETURN :
13240 *==========================================================================*/
13241int QCamera3HardwareInterface::process_capture_request(
13242 const struct camera3_device *device,
13243 camera3_capture_request_t *request)
13244{
13245 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013246 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013247 QCamera3HardwareInterface *hw =
13248 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13249 if (!hw) {
13250 LOGE("NULL camera device");
13251 return -EINVAL;
13252 }
13253
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013254 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013255 LOGD("X");
13256 return rc;
13257}
13258
13259/*===========================================================================
13260 * FUNCTION : dump
13261 *
13262 * DESCRIPTION: Dump HAL state to the given file descriptor (invoked via dumpsys)
13263 *
13264 * PARAMETERS :
13265 *
13266 *
13267 * RETURN :
13268 *==========================================================================*/
13269
13270void QCamera3HardwareInterface::dump(
13271 const struct camera3_device *device, int fd)
13272{
13273 /* Log level property is read when "adb shell dumpsys media.camera" is
13274 called so that the log level can be controlled without restarting
13275 the media server */
13276 getLogLevel();
13277
13278 LOGD("E");
13279 QCamera3HardwareInterface *hw =
13280 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13281 if (!hw) {
13282 LOGE("NULL camera device");
13283 return;
13284 }
13285
13286 hw->dump(fd);
13287 LOGD("X");
13288 return;
13289}
13290
13291/*===========================================================================
13292 * FUNCTION : flush
13293 *
13294 * DESCRIPTION: Flush all in-flight requests when the device is in the STARTED state
13295 *
13296 * PARAMETERS :
13297 *
13298 *
13299 * RETURN :
13300 *==========================================================================*/
13301
13302int QCamera3HardwareInterface::flush(
13303 const struct camera3_device *device)
13304{
13305 int rc;
13306 LOGD("E");
13307 QCamera3HardwareInterface *hw =
13308 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13309 if (!hw) {
13310 LOGE("NULL camera device");
13311 return -EINVAL;
13312 }
13313
13314 pthread_mutex_lock(&hw->mMutex);
13315 // Validate current state
13316 switch (hw->mState) {
13317 case STARTED:
13318 /* valid state */
13319 break;
13320
13321 case ERROR:
13322 pthread_mutex_unlock(&hw->mMutex);
13323 hw->handleCameraDeviceError();
13324 return -ENODEV;
13325
13326 default:
13327 LOGI("Flush returned during state %d", hw->mState);
13328 pthread_mutex_unlock(&hw->mMutex);
13329 return 0;
13330 }
13331 pthread_mutex_unlock(&hw->mMutex);
13332
13333 rc = hw->flush(true /* restart channels */ );
13334 LOGD("X");
13335 return rc;
13336}
13337
13338/*===========================================================================
13339 * FUNCTION : close_camera_device
13340 *
13341 * DESCRIPTION: Close the camera device and free the HAL instance
13342 *
13343 * PARAMETERS :
13344 *
13345 *
13346 * RETURN :
13347 *==========================================================================*/
13348int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13349{
13350 int ret = NO_ERROR;
13351 QCamera3HardwareInterface *hw =
13352 reinterpret_cast<QCamera3HardwareInterface *>(
13353 reinterpret_cast<camera3_device_t *>(device)->priv);
13354 if (!hw) {
13355 LOGE("NULL camera device");
13356 return BAD_VALUE;
13357 }
13358
13359 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13360 delete hw;
13361 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013362 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013363 return ret;
13364}
13365
13366/*===========================================================================
13367 * FUNCTION : getWaveletDenoiseProcessPlate
13368 *
13369 * DESCRIPTION: query wavelet denoise process plate
13370 *
13371 * PARAMETERS : None
13372 *
13373 * RETURN : WNR process plate value
13374 *==========================================================================*/
13375cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13376{
13377 char prop[PROPERTY_VALUE_MAX];
13378 memset(prop, 0, sizeof(prop));
13379 property_get("persist.denoise.process.plates", prop, "0");
13380 int processPlate = atoi(prop);
13381 switch(processPlate) {
13382 case 0:
13383 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13384 case 1:
13385 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13386 case 2:
13387 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13388 case 3:
13389 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13390 default:
13391 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13392 }
13393}
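/* Illustrative usage (assumes a build where setprop is permitted):
 *   adb shell setprop persist.denoise.process.plates 2
 * selects CAM_WAVELET_DENOISE_STREAMLINE_YCBCR per the mapping above; the default
 * property value of 0 selects CAM_WAVELET_DENOISE_YCBCR_PLANE, and out-of-range
 * values fall back to the streamlined Y/CbCr plate.
 */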
13394
13395
13396/*===========================================================================
13397 * FUNCTION : getTemporalDenoiseProcessPlate
13398 *
13399 * DESCRIPTION: query temporal denoise process plate
13400 *
13401 * PARAMETERS : None
13402 *
13403 * RETURN : TNR process plate value
13404 *==========================================================================*/
13405cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13406{
13407 char prop[PROPERTY_VALUE_MAX];
13408 memset(prop, 0, sizeof(prop));
13409 property_get("persist.tnr.process.plates", prop, "0");
13410 int processPlate = atoi(prop);
13411 switch(processPlate) {
13412 case 0:
13413 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13414 case 1:
13415 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13416 case 2:
13417 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13418 case 3:
13419 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13420 default:
13421 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13422 }
13423}
13424
13425
13426/*===========================================================================
13427 * FUNCTION : extractSceneMode
13428 *
13429 * DESCRIPTION: Extract scene mode from frameworks set metadata
13430 *
13431 * PARAMETERS :
13432 * @frame_settings: CameraMetadata reference
13433 * @metaMode: ANDROID_CONTROL_MODE
13434 * @hal_metadata: hal metadata structure
13435 *
13436 * RETURN : None
13437 *==========================================================================*/
13438int32_t QCamera3HardwareInterface::extractSceneMode(
13439 const CameraMetadata &frame_settings, uint8_t metaMode,
13440 metadata_buffer_t *hal_metadata)
13441{
13442 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013443 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13444
13445 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13446 LOGD("Ignoring control mode OFF_KEEP_STATE");
13447 return NO_ERROR;
13448 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013449
13450 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13451 camera_metadata_ro_entry entry =
13452 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13453 if (0 == entry.count)
13454 return rc;
13455
13456 uint8_t fwk_sceneMode = entry.data.u8[0];
13457
13458 int val = lookupHalName(SCENE_MODES_MAP,
13459 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13460 fwk_sceneMode);
13461 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013462 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013463 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013464 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013465 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013466
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013467 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13468 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13469 }
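    /* Sensor (in-hardware) HDR is attempted first; when it is not engaged, the HDR
     * scene mode falls back to multi-frame bracketing via the
     * CAM_INTF_PARM_HAL_BRACKETING_HDR entry added below. */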
13470
13471 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13472 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013473 cam_hdr_param_t hdr_params;
13474 hdr_params.hdr_enable = 1;
13475 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13476 hdr_params.hdr_need_1x = false;
13477 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13478 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13479 rc = BAD_VALUE;
13480 }
13481 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013482
Thierry Strudel3d639192016-09-09 11:52:26 -070013483 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13484 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13485 rc = BAD_VALUE;
13486 }
13487 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013488
13489 if (mForceHdrSnapshot) {
13490 cam_hdr_param_t hdr_params;
13491 hdr_params.hdr_enable = 1;
13492 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13493 hdr_params.hdr_need_1x = false;
13494 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13495 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13496 rc = BAD_VALUE;
13497 }
13498 }
13499
Thierry Strudel3d639192016-09-09 11:52:26 -070013500 return rc;
13501}
13502
13503/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013504 * FUNCTION : setVideoHdrMode
13505 *
13506 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13507 *
13508 * PARAMETERS :
13509 * @hal_metadata: hal metadata structure
13510 * @vhdr : requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13511 *
13512 * RETURN : int32_t status (NO_ERROR on success, BAD_VALUE for an invalid mode)
13513 *==========================================================================*/
13514int32_t QCamera3HardwareInterface::setVideoHdrMode(
13515 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13516{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013517 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13518 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13519 }
13520
13521 LOGE("Invalid Video HDR mode %d!", vhdr);
13522 return BAD_VALUE;
13523}
13524
13525/*===========================================================================
13526 * FUNCTION : setSensorHDR
13527 *
13528 * DESCRIPTION: Enable/disable sensor HDR.
13529 *
13530 * PARAMETERS :
13531 * @hal_metadata: hal metadata structure
13532 * @enable: whether to enable or disable sensor HDR
13533 * @isVideoHdrEnable: set when invoked from the video HDR path
13534 * RETURN : int32_t status (NO_ERROR on success, BAD_VALUE otherwise)
13535 *==========================================================================*/
13536int32_t QCamera3HardwareInterface::setSensorHDR(
13537 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13538{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013539 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013540 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13541
13542 if (enable) {
13543 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13544 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13545 #ifdef _LE_CAMERA_
13546 //Default to staggered HDR for IOT
13547 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13548 #else
13549 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13550 #endif
13551 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13552 }
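    /* The property value is cast directly onto cam_sensor_hdr_type_t; per the IOT
     * default above, 3 selects staggered HDR, while 0 leaves sensor HDR off. */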
13553
13554 bool isSupported = false;
13555 switch (sensor_hdr) {
13556 case CAM_SENSOR_HDR_IN_SENSOR:
13557 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13558 CAM_QCOM_FEATURE_SENSOR_HDR) {
13559 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013560 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013561 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013562 break;
13563 case CAM_SENSOR_HDR_ZIGZAG:
13564 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13565 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13566 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013567 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013568 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013569 break;
13570 case CAM_SENSOR_HDR_STAGGERED:
13571 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13572 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13573 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013574 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013575 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013576 break;
13577 case CAM_SENSOR_HDR_OFF:
13578 isSupported = true;
13579 LOGD("Turning off sensor HDR");
13580 break;
13581 default:
13582 LOGE("HDR mode %d not supported", sensor_hdr);
13583 rc = BAD_VALUE;
13584 break;
13585 }
13586
13587 if(isSupported) {
13588 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13589 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13590 rc = BAD_VALUE;
13591 } else {
13592 if(!isVideoHdrEnable)
13593 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013594 }
13595 }
13596 return rc;
13597}
13598
13599/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013600 * FUNCTION : needRotationReprocess
13601 *
13602 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13603 *
13604 * PARAMETERS : none
13605 *
13606 * RETURN : true: needed
13607 * false: no need
13608 *==========================================================================*/
13609bool QCamera3HardwareInterface::needRotationReprocess()
13610{
13611 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13612        // pp has the capability to process rotation, so offload rotation to reprocess
13613 LOGH("need do reprocess for rotation");
13614 return true;
13615 }
13616
13617 return false;
13618}
13619
13620/*===========================================================================
13621 * FUNCTION : needReprocess
13622 *
13623 * DESCRIPTION: if reprocess is needed
13624 *
13625 * PARAMETERS : none
13626 *
13627 * RETURN : true: needed
13628 * false: no need
13629 *==========================================================================*/
13630bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13631{
13632 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13633 // TODO: add for ZSL HDR later
13634 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13635 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13636 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13637 return true;
13638 } else {
13639 LOGH("already post processed frame");
13640 return false;
13641 }
13642 }
13643 return needRotationReprocess();
13644}
13645
13646/*===========================================================================
13647 * FUNCTION : needJpegExifRotation
13648 *
13649 * DESCRIPTION: if rotation from jpeg is needed
13650 *
13651 * PARAMETERS : none
13652 *
13653 * RETURN : true: needed
13654 * false: no need
13655 *==========================================================================*/
13656bool QCamera3HardwareInterface::needJpegExifRotation()
13657{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013658 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013659 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13660 LOGD("Need use Jpeg EXIF Rotation");
13661 return true;
13662 }
13663 return false;
13664}
13665
13666/*===========================================================================
13667 * FUNCTION : addOfflineReprocChannel
13668 *
13669 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13670 * coming from input channel
13671 *
13672 * PARAMETERS :
13673 * @config : reprocess configuration
13674 * @inputChHandle : pointer to the input (source) channel
13675 *
13676 *
13677 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13678 *==========================================================================*/
13679QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13680 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13681{
13682 int32_t rc = NO_ERROR;
13683 QCamera3ReprocessChannel *pChannel = NULL;
13684
13685 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013686 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13687 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013688 if (NULL == pChannel) {
13689 LOGE("no mem for reprocess channel");
13690 return NULL;
13691 }
13692
13693 rc = pChannel->initialize(IS_TYPE_NONE);
13694 if (rc != NO_ERROR) {
13695 LOGE("init reprocess channel failed, ret = %d", rc);
13696 delete pChannel;
13697 return NULL;
13698 }
13699
13700 // pp feature config
13701 cam_pp_feature_config_t pp_config;
13702 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13703
13704 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13705 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13706 & CAM_QCOM_FEATURE_DSDN) {
13707        // Use CPP CDS in case h/w supports it.
13708 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13709 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13710 }
13711 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13712 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13713 }
13714
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013715 if (config.hdr_param.hdr_enable) {
13716 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13717 pp_config.hdr_param = config.hdr_param;
13718 }
13719
13720 if (mForceHdrSnapshot) {
13721 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13722 pp_config.hdr_param.hdr_enable = 1;
13723 pp_config.hdr_param.hdr_need_1x = 0;
13724 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13725 }
13726
Thierry Strudel3d639192016-09-09 11:52:26 -070013727 rc = pChannel->addReprocStreamsFromSource(pp_config,
13728 config,
13729 IS_TYPE_NONE,
13730 mMetadataChannel);
13731
13732 if (rc != NO_ERROR) {
13733 delete pChannel;
13734 return NULL;
13735 }
13736 return pChannel;
13737}
13738
13739/*===========================================================================
13740 * FUNCTION : getMobicatMask
13741 *
13742 * DESCRIPTION: returns mobicat mask
13743 *
13744 * PARAMETERS : none
13745 *
13746 * RETURN : mobicat mask
13747 *
13748 *==========================================================================*/
13749uint8_t QCamera3HardwareInterface::getMobicatMask()
13750{
13751 return m_MobicatMask;
13752}
13753
13754/*===========================================================================
13755 * FUNCTION : setMobicat
13756 *
13757 * DESCRIPTION: set Mobicat on/off.
13758 *
13759 * PARAMETERS :
13760 * @params : none
13761 *
13762 * RETURN : int32_t type of status
13763 * NO_ERROR -- success
13764 * none-zero failure code
13765 *==========================================================================*/
13766int32_t QCamera3HardwareInterface::setMobicat()
13767{
13768 char value [PROPERTY_VALUE_MAX];
13769 property_get("persist.camera.mobicat", value, "0");
13770 int32_t ret = NO_ERROR;
13771 uint8_t enableMobi = (uint8_t)atoi(value);
13772
13773 if (enableMobi) {
13774 tune_cmd_t tune_cmd;
13775 tune_cmd.type = SET_RELOAD_CHROMATIX;
13776 tune_cmd.module = MODULE_ALL;
13777 tune_cmd.value = TRUE;
13778 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13779 CAM_INTF_PARM_SET_VFE_COMMAND,
13780 tune_cmd);
13781
13782 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13783 CAM_INTF_PARM_SET_PP_COMMAND,
13784 tune_cmd);
13785 }
13786 m_MobicatMask = enableMobi;
13787
13788 return ret;
13789}
13790
13791/*===========================================================================
13792* FUNCTION : getLogLevel
13793*
13794* DESCRIPTION: Reads the log level property into a variable
13795*
13796* PARAMETERS :
13797* None
13798*
13799* RETURN :
13800* None
13801*==========================================================================*/
13802void QCamera3HardwareInterface::getLogLevel()
13803{
13804 char prop[PROPERTY_VALUE_MAX];
13805 uint32_t globalLogLevel = 0;
13806
13807 property_get("persist.camera.hal.debug", prop, "0");
13808 int val = atoi(prop);
13809 if (0 <= val) {
13810 gCamHal3LogLevel = (uint32_t)val;
13811 }
13812
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013813 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013814 gKpiDebugLevel = atoi(prop);
13815
13816 property_get("persist.camera.global.debug", prop, "0");
13817 val = atoi(prop);
13818 if (0 <= val) {
13819 globalLogLevel = (uint32_t)val;
13820 }
13821
13822 /* Highest log level among hal.logs and global.logs is selected */
13823 if (gCamHal3LogLevel < globalLogLevel)
13824 gCamHal3LogLevel = globalLogLevel;
13825
13826 return;
13827}
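/* Illustrative usage: HAL verbosity can be raised at runtime, e.g.
 *   adb shell setprop persist.camera.hal.debug 4
 *   adb shell dumpsys media.camera     // re-reads the properties via dump()
 * The effective level is the higher of persist.camera.hal.debug and
 * persist.camera.global.debug.
 */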
13828
13829/*===========================================================================
13830 * FUNCTION : validateStreamRotations
13831 *
13832 * DESCRIPTION: Check if the rotations requested are supported
13833 *
13834 * PARAMETERS :
13835 * @stream_list : streams to be configured
13836 *
13837 * RETURN : NO_ERROR on success
13838 * -EINVAL on failure
13839 *
13840 *==========================================================================*/
13841int QCamera3HardwareInterface::validateStreamRotations(
13842 camera3_stream_configuration_t *streamList)
13843{
13844 int rc = NO_ERROR;
13845
13846 /*
13847 * Loop through all streams requested in configuration
13848 * Check if unsupported rotations have been requested on any of them
13849 */
13850 for (size_t j = 0; j < streamList->num_streams; j++){
13851 camera3_stream_t *newStream = streamList->streams[j];
13852
Emilian Peev35ceeed2017-06-29 11:58:56 -070013853 switch(newStream->rotation) {
13854 case CAMERA3_STREAM_ROTATION_0:
13855 case CAMERA3_STREAM_ROTATION_90:
13856 case CAMERA3_STREAM_ROTATION_180:
13857 case CAMERA3_STREAM_ROTATION_270:
13858 //Expected values
13859 break;
13860 default:
13861 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13862                    " type:%d and stream format:%d", __func__,
13863 newStream->rotation, newStream->stream_type,
13864 newStream->format);
13865 return -EINVAL;
13866 }
13867
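        /* Beyond being a recognised enum value, a non-zero rotation is only honoured on
         * implementation-defined output streams; ZSL (bidirectional) streams must request
         * ROTATION_0, as checked below. */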
Thierry Strudel3d639192016-09-09 11:52:26 -070013868 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13869 bool isImplDef = (newStream->format ==
13870 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13871 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13872 isImplDef);
13873
13874 if (isRotated && (!isImplDef || isZsl)) {
13875 LOGE("Error: Unsupported rotation of %d requested for stream"
13876                    " type:%d and stream format:%d",
13877 newStream->rotation, newStream->stream_type,
13878 newStream->format);
13879 rc = -EINVAL;
13880 break;
13881 }
13882 }
13883
13884 return rc;
13885}
13886
13887/*===========================================================================
13888* FUNCTION : getFlashInfo
13889*
13890* DESCRIPTION: Retrieve information about whether the device has a flash.
13891*
13892* PARAMETERS :
13893* @cameraId : Camera id to query
13894* @hasFlash : Boolean indicating whether there is a flash device
13895* associated with given camera
13896* @flashNode : If a flash device exists, this will be its device node.
13897*
13898* RETURN :
13899* None
13900*==========================================================================*/
13901void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13902 bool& hasFlash,
13903 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13904{
13905 cam_capability_t* camCapability = gCamCapability[cameraId];
13906 if (NULL == camCapability) {
13907 hasFlash = false;
13908 flashNode[0] = '\0';
13909 } else {
13910 hasFlash = camCapability->flash_available;
13911 strlcpy(flashNode,
13912 (char*)camCapability->flash_dev_name,
13913 QCAMERA_MAX_FILEPATH_LENGTH);
13914 }
13915}
13916
13917/*===========================================================================
13918* FUNCTION : getEepromVersionInfo
13919*
13920* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13921*
13922* PARAMETERS : None
13923*
13924* RETURN : string describing EEPROM version
13925* "\0" if no such info available
13926*==========================================================================*/
13927const char *QCamera3HardwareInterface::getEepromVersionInfo()
13928{
13929 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13930}
13931
13932/*===========================================================================
13933* FUNCTION : getLdafCalib
13934*
13935* DESCRIPTION: Retrieve Laser AF calibration data
13936*
13937* PARAMETERS : None
13938*
13939* RETURN : Two uint32_t describing laser AF calibration data
13940* NULL if none is available.
13941*==========================================================================*/
13942const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13943{
13944 if (mLdafCalibExist) {
13945 return &mLdafCalib[0];
13946 } else {
13947 return NULL;
13948 }
13949}
13950
13951/*===========================================================================
13952 * FUNCTION : dynamicUpdateMetaStreamInfo
13953 *
13954 * DESCRIPTION: This function:
13955 * (1) stops all the channels
13956 * (2) returns error on pending requests and buffers
13957 * (3) sends metastream_info in setparams
13958 * (4) starts all channels
13959 * This is useful when sensor has to be restarted to apply any
13960 * settings such as frame rate from a different sensor mode
13961 *
13962 * PARAMETERS : None
13963 *
13964 * RETURN : NO_ERROR on success
13965 * Error codes on failure
13966 *
13967 *==========================================================================*/
13968int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13969{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013970 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013971 int rc = NO_ERROR;
13972
13973 LOGD("E");
13974
13975 rc = stopAllChannels();
13976 if (rc < 0) {
13977 LOGE("stopAllChannels failed");
13978 return rc;
13979 }
13980
13981 rc = notifyErrorForPendingRequests();
13982 if (rc < 0) {
13983 LOGE("notifyErrorForPendingRequests failed");
13984 return rc;
13985 }
13986
13987 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13988 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13989                ", Format:%d",
13990 mStreamConfigInfo.type[i],
13991 mStreamConfigInfo.stream_sizes[i].width,
13992 mStreamConfigInfo.stream_sizes[i].height,
13993 mStreamConfigInfo.postprocess_mask[i],
13994 mStreamConfigInfo.format[i]);
13995 }
13996
13997 /* Send meta stream info once again so that ISP can start */
13998 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13999 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14000 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14001 mParameters);
14002 if (rc < 0) {
14003 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14004 }
14005
14006 rc = startAllChannels();
14007 if (rc < 0) {
14008 LOGE("startAllChannels failed");
14009 return rc;
14010 }
14011
14012 LOGD("X");
14013 return rc;
14014}
14015
14016/*===========================================================================
14017 * FUNCTION : stopAllChannels
14018 *
14019 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14020 *
14021 * PARAMETERS : None
14022 *
14023 * RETURN : NO_ERROR on success
14024 * Error codes on failure
14025 *
14026 *==========================================================================*/
14027int32_t QCamera3HardwareInterface::stopAllChannels()
14028{
14029 int32_t rc = NO_ERROR;
14030
14031 LOGD("Stopping all channels");
14032 // Stop the Streams/Channels
14033 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14034 it != mStreamInfo.end(); it++) {
14035 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14036 if (channel) {
14037 channel->stop();
14038 }
14039 (*it)->status = INVALID;
14040 }
14041
14042 if (mSupportChannel) {
14043 mSupportChannel->stop();
14044 }
14045 if (mAnalysisChannel) {
14046 mAnalysisChannel->stop();
14047 }
14048 if (mRawDumpChannel) {
14049 mRawDumpChannel->stop();
14050 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014051 if (mHdrPlusRawSrcChannel) {
14052 mHdrPlusRawSrcChannel->stop();
14053 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014054 if (mMetadataChannel) {
14055 /* If content of mStreamInfo is not 0, there is metadata stream */
14056 mMetadataChannel->stop();
14057 }
14058
14059 LOGD("All channels stopped");
14060 return rc;
14061}
14062
14063/*===========================================================================
14064 * FUNCTION : startAllChannels
14065 *
14066 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14067 *
14068 * PARAMETERS : None
14069 *
14070 * RETURN : NO_ERROR on success
14071 * Error codes on failure
14072 *
14073 *==========================================================================*/
14074int32_t QCamera3HardwareInterface::startAllChannels()
14075{
14076 int32_t rc = NO_ERROR;
14077
14078 LOGD("Start all channels ");
14079 // Start the Streams/Channels
14080 if (mMetadataChannel) {
14081 /* If content of mStreamInfo is not 0, there is metadata stream */
14082 rc = mMetadataChannel->start();
14083 if (rc < 0) {
14084 LOGE("META channel start failed");
14085 return rc;
14086 }
14087 }
14088 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14089 it != mStreamInfo.end(); it++) {
14090 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14091 if (channel) {
14092 rc = channel->start();
14093 if (rc < 0) {
14094 LOGE("channel start failed");
14095 return rc;
14096 }
14097 }
14098 }
14099 if (mAnalysisChannel) {
14100 mAnalysisChannel->start();
14101 }
14102 if (mSupportChannel) {
14103 rc = mSupportChannel->start();
14104 if (rc < 0) {
14105 LOGE("Support channel start failed");
14106 return rc;
14107 }
14108 }
14109 if (mRawDumpChannel) {
14110 rc = mRawDumpChannel->start();
14111 if (rc < 0) {
14112 LOGE("RAW dump channel start failed");
14113 return rc;
14114 }
14115 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014116 if (mHdrPlusRawSrcChannel) {
14117 rc = mHdrPlusRawSrcChannel->start();
14118 if (rc < 0) {
14119 LOGE("HDR+ RAW channel start failed");
14120 return rc;
14121 }
14122 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014123
14124 LOGD("All channels started");
14125 return rc;
14126}
14127
14128/*===========================================================================
14129 * FUNCTION : notifyErrorForPendingRequests
14130 *
14131 * DESCRIPTION: This function sends error for all the pending requests/buffers
14132 *
14133 * PARAMETERS : None
14134 *
14135 * RETURN : Error codes
14136 * NO_ERROR on success
14137 *
14138 *==========================================================================*/
14139int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14140{
Emilian Peev7650c122017-01-19 08:24:33 -080014141 notifyErrorFoPendingDepthData(mDepthChannel);
14142
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014143 auto pendingRequest = mPendingRequestsList.begin();
14144 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014145
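    // Both lists are assumed to be kept in ascending frame-number order, which is
    // what allows the single merge-style pass below.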
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014146 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14147 // buffers (for which buffers aren't sent yet).
14148 while (pendingRequest != mPendingRequestsList.end() ||
14149 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14150 if (pendingRequest == mPendingRequestsList.end() ||
14151 pendingBuffer->frame_number < pendingRequest->frame_number) {
14152 // If metadata for this frame was sent, notify about a buffer error and returns buffers
14153 // with error.
14154 for (auto &info : pendingBuffer->mPendingBufferList) {
14155 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014156 camera3_notify_msg_t notify_msg;
14157 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14158 notify_msg.type = CAMERA3_MSG_ERROR;
14159 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014160 notify_msg.message.error.error_stream = info.stream;
14161 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014162 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014163
14164 camera3_stream_buffer_t buffer = {};
14165 buffer.acquire_fence = -1;
14166 buffer.release_fence = -1;
14167 buffer.buffer = info.buffer;
14168 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14169 buffer.stream = info.stream;
14170 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014171 }
14172
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014173 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14174 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14175 pendingBuffer->frame_number > pendingRequest->frame_number) {
14176 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014177 camera3_notify_msg_t notify_msg;
14178 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14179 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014180 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14181 notify_msg.message.error.error_stream = nullptr;
14182 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014183 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014184
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014185 if (pendingRequest->input_buffer != nullptr) {
14186 camera3_capture_result result = {};
14187 result.frame_number = pendingRequest->frame_number;
14188 result.result = nullptr;
14189 result.input_buffer = pendingRequest->input_buffer;
14190 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014191 }
14192
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014193 mShutterDispatcher.clear(pendingRequest->frame_number);
14194 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14195 } else {
14196 // If both buffers and result metadata weren't sent yet, notify about a request error
14197 // and return buffers with error.
14198 for (auto &info : pendingBuffer->mPendingBufferList) {
14199 camera3_notify_msg_t notify_msg;
14200 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14201 notify_msg.type = CAMERA3_MSG_ERROR;
14202 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14203 notify_msg.message.error.error_stream = info.stream;
14204 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14205 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014206
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014207 camera3_stream_buffer_t buffer = {};
14208 buffer.acquire_fence = -1;
14209 buffer.release_fence = -1;
14210 buffer.buffer = info.buffer;
14211 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14212 buffer.stream = info.stream;
14213 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14214 }
14215
14216 if (pendingRequest->input_buffer != nullptr) {
14217 camera3_capture_result result = {};
14218 result.frame_number = pendingRequest->frame_number;
14219 result.result = nullptr;
14220 result.input_buffer = pendingRequest->input_buffer;
14221 orchestrateResult(&result);
14222 }
14223
14224 mShutterDispatcher.clear(pendingRequest->frame_number);
14225 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14226 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014227 }
14228 }
14229
14230 /* Reset pending frame Drop list and requests list */
14231 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014232 mShutterDispatcher.clear();
14233 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014234 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014235 mExpectedFrameDuration = 0;
14236 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014237 LOGH("Cleared all the pending buffers ");
14238
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014239 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014240}
14241
14242bool QCamera3HardwareInterface::isOnEncoder(
14243 const cam_dimension_t max_viewfinder_size,
14244 uint32_t width, uint32_t height)
14245{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014246 return ((width > (uint32_t)max_viewfinder_size.width) ||
14247 (height > (uint32_t)max_viewfinder_size.height) ||
14248 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14249 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014250}
14251
14252/*===========================================================================
14253 * FUNCTION : setBundleInfo
14254 *
14255 * DESCRIPTION: Set bundle info for all streams that are bundle.
14256 *
14257 * PARAMETERS : None
14258 *
14259 * RETURN : NO_ERROR on success
14260 * Error codes on failure
14261 *==========================================================================*/
14262int32_t QCamera3HardwareInterface::setBundleInfo()
14263{
14264 int32_t rc = NO_ERROR;
14265
14266 if (mChannelHandle) {
14267 cam_bundle_config_t bundleInfo;
14268 memset(&bundleInfo, 0, sizeof(bundleInfo));
14269 rc = mCameraHandle->ops->get_bundle_info(
14270 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14271 if (rc != NO_ERROR) {
14272 LOGE("get_bundle_info failed");
14273 return rc;
14274 }
14275 if (mAnalysisChannel) {
14276 mAnalysisChannel->setBundleInfo(bundleInfo);
14277 }
14278 if (mSupportChannel) {
14279 mSupportChannel->setBundleInfo(bundleInfo);
14280 }
14281 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14282 it != mStreamInfo.end(); it++) {
14283 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14284 channel->setBundleInfo(bundleInfo);
14285 }
14286 if (mRawDumpChannel) {
14287 mRawDumpChannel->setBundleInfo(bundleInfo);
14288 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014289 if (mHdrPlusRawSrcChannel) {
14290 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14291 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014292 }
14293
14294 return rc;
14295}
14296
14297/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014298 * FUNCTION : setInstantAEC
14299 *
14300 * DESCRIPTION: Set Instant AEC related params.
14301 *
14302 * PARAMETERS :
14303 * @meta: CameraMetadata reference
14304 *
14305 * RETURN : NO_ERROR on success
14306 * Error codes on failure
14307 *==========================================================================*/
14308int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14309{
14310 int32_t rc = NO_ERROR;
14311 uint8_t val = 0;
14312 char prop[PROPERTY_VALUE_MAX];
14313
14314 // First try to configure instant AEC from framework metadata
14315 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14316 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14317 }
14318
14319 // If framework did not set this value, try to read from set prop.
14320 if (val == 0) {
14321 memset(prop, 0, sizeof(prop));
14322 property_get("persist.camera.instant.aec", prop, "0");
14323 val = (uint8_t)atoi(prop);
14324 }
14325
14326 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14327 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14328 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14329 mInstantAEC = val;
14330 mInstantAECSettledFrameNumber = 0;
14331 mInstantAecFrameIdxCount = 0;
14332 LOGH("instantAEC value set %d",val);
14333 if (mInstantAEC) {
14334 memset(prop, 0, sizeof(prop));
14335 property_get("persist.camera.ae.instant.bound", prop, "10");
14336 int32_t aec_frame_skip_cnt = atoi(prop);
14337 if (aec_frame_skip_cnt >= 0) {
14338 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14339 } else {
14340 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14341 rc = BAD_VALUE;
14342 }
14343 }
14344 } else {
14345 LOGE("Bad instant aec value set %d", val);
14346 rc = BAD_VALUE;
14347 }
14348 return rc;
14349}
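/* Illustrative usage of the property fallback path (assumes setprop is permitted):
 *   adb shell setprop persist.camera.instant.aec 1      // assumed to map to a valid
 *                                                       // cam_aec_convergence_type value
 *   adb shell setprop persist.camera.ae.instant.bound 10
 * The first selects the instant-AEC convergence mode when the framework does not set
 * QCAMERA3_INSTANT_AEC_MODE; the second bounds how many frames are skipped for
 * display while AEC settles.
 */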
14350
14351/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014352 * FUNCTION : get_num_overall_buffers
14353 *
14354 * DESCRIPTION: Estimate number of pending buffers across all requests.
14355 *
14356 * PARAMETERS : None
14357 *
14358 * RETURN : Number of overall pending buffers
14359 *
14360 *==========================================================================*/
14361uint32_t PendingBuffersMap::get_num_overall_buffers()
14362{
14363 uint32_t sum_buffers = 0;
14364 for (auto &req : mPendingBuffersInRequest) {
14365 sum_buffers += req.mPendingBufferList.size();
14366 }
14367 return sum_buffers;
14368}
14369
/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}

/*===========================================================================
 * FUNCTION   : getBufErrStatus
 *
 * DESCRIPTION: get buffer error status
 *
 * PARAMETERS : @buffer: buffer handle
 *
 * RETURN     : Error status
 *
 *==========================================================================*/
int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
{
    for (auto& req : mPendingBuffersInRequest) {
        for (auto& k : req.mPendingBufferList) {
            if (k.buffer == buffer)
                return k.bufStatus;
        }
    }
    return CAMERA3_BUFFER_STATUS_OK;
}

/*===========================================================================
 * FUNCTION   : setPAAFSupport
 *
 * DESCRIPTION: Set the preview-assisted auto focus support bit in
 *              feature mask according to stream type and filter
 *              arrangement
 *
 * PARAMETERS : @feature_mask: current feature mask, which may be modified
 *              @stream_type: stream type
 *              @filter_arrangement: filter arrangement
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::setPAAFSupport(
        cam_feature_mask_t& feature_mask,
        cam_stream_type_t stream_type,
        cam_color_filter_arrangement_t filter_arrangement)
{
    switch (filter_arrangement) {
    case CAM_FILTER_ARRANGEMENT_RGGB:
    case CAM_FILTER_ARRANGEMENT_GRBG:
    case CAM_FILTER_ARRANGEMENT_GBRG:
    case CAM_FILTER_ARRANGEMENT_BGGR:
        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
                (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
            if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
                feature_mask |= CAM_QCOM_FEATURE_PAAF;
        }
        break;
    case CAM_FILTER_ARRANGEMENT_Y:
        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
            feature_mask |= CAM_QCOM_FEATURE_PAAF;
        }
        break;
    default:
        break;
    }
    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
            feature_mask, stream_type, filter_arrangement);
}

/*===========================================================================
* FUNCTION   : getSensorMountAngle
*
* DESCRIPTION: Retrieve sensor mount angle
*
* PARAMETERS : None
*
* RETURN     : sensor mount angle in uint32_t
*==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    return gCamCapability[mCameraId]->sensor_mount_angle;
}

/*===========================================================================
* FUNCTION   : getRelatedCalibrationData
*
* DESCRIPTION: Retrieve related system calibration data
*
* PARAMETERS : None
*
* RETURN     : Pointer of related system calibration data
*==========================================================================*/
const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
{
    return (const cam_related_system_calibration_data_t *)
            &(gCamCapability[mCameraId]->related_cam_calibration);
}

/*===========================================================================
 * FUNCTION   : is60HzZone
 *
 * DESCRIPTION: Whether the device is in a region with 60Hz power-line
 *              frequency, estimated from the local time zone offset
 *
 * PARAMETERS : None
 *
 * RETURN     : True if in 60Hz zone, False otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::is60HzZone()
{
    time_t t = time(NULL);
    struct tm lt;

    struct tm* r = localtime_r(&t, &lt);

    if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
        return true;
    else
        return false;
}

/*===========================================================================
 * FUNCTION   : adjustBlackLevelForCFA
 *
 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
 *              of bayer CFA (Color Filter Array).
 *
 * PARAMETERS : @input: black level pattern in the order of RGGB
 *              @output: black level pattern in the order of CFA
 *              @color_arrangement: CFA color arrangement
 *
 * RETURN     : None
 *==========================================================================*/
template<typename T>
void QCamera3HardwareInterface::adjustBlackLevelForCFA(
        T input[BLACK_LEVEL_PATTERN_CNT],
        T output[BLACK_LEVEL_PATTERN_CNT],
        cam_color_filter_arrangement_t color_arrangement)
{
    switch (color_arrangement) {
    case CAM_FILTER_ARRANGEMENT_GRBG:
        output[0] = input[1];
        output[1] = input[0];
        output[2] = input[3];
        output[3] = input[2];
        break;
    case CAM_FILTER_ARRANGEMENT_GBRG:
        output[0] = input[2];
        output[1] = input[3];
        output[2] = input[0];
        output[3] = input[1];
        break;
    case CAM_FILTER_ARRANGEMENT_BGGR:
        output[0] = input[3];
        output[1] = input[2];
        output[2] = input[1];
        output[3] = input[0];
        break;
    case CAM_FILTER_ARRANGEMENT_RGGB:
        output[0] = input[0];
        output[1] = input[1];
        output[2] = input[2];
        output[3] = input[3];
        break;
    default:
        LOGE("Invalid color arrangement to derive dynamic blacklevel");
        break;
    }
}

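/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Update an HDR+ capture result's metadata with the JPEG and
 *              capture-intent settings saved from the original request.
 *
 * PARAMETERS : @resultMetadata: result metadata to be updated
 *              @settings: HAL metadata saved from the HDR+ still capture
 *                         request
 *
 * RETURN     : None
 *==========================================================================*/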
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
        CameraMetadata &resultMetadata,
        std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    }
}

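/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Try to submit a capture request to the HDR+ service. The
 *              request qualifies only if noise reduction and edge modes are
 *              HIGH_QUALITY and the single output buffer is a JPEG stream.
 *
 * PARAMETERS : @hdrPlusRequest: pending HDR+ request to be filled in
 *              @request: framework capture request
 *              @metadata: framework capture request settings
 *
 * RETURN     : true if the request was submitted as an HDR+ request
 *              false otherwise
 *==========================================================================*/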
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
        const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;

    // Check noise reduction mode is high quality.
    if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
            metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
                metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
        return false;
    }

    // Check edge mode is high quality.
    if (!metadata.exists(ANDROID_EDGE_MODE) ||
            metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    if (request.num_output_buffers != 1 ||
            request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
            ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
        }
        return false;
    }

    // Get a YUV buffer from pic channel.
    QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
    auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
    status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
    if (res != OK) {
        ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return false;
    }

    pbcamera::StreamBuffer buffer;
    buffer.streamId = kPbYuvOutputStreamId;
    buffer.dmaBufFd = yuvBuffer->fd;
    buffer.data = yuvBuffer->buffer;
    buffer.dataSize = yuvBuffer->frame_len;

    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    pbRequest.outputBuffers.push_back(buffer);

    // Submit an HDR+ capture request to HDR+ service.
    res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        return false;
    }

    hdrPlusRequest->yuvBuffer = yuvBuffer;
    hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);

    return true;
}

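/*===========================================================================
 * FUNCTION   : openHdrPlusClientAsyncLocked
 *
 * DESCRIPTION: Ask the Easel manager client to open an HDR+ client
 *              asynchronously, if one is not already open or being opened.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/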
status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
{
    if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
        return OK;
    }

    status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
    if (res != OK) {
        ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    gHdrPlusClientOpening = true;

    return OK;
}

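/*===========================================================================
 * FUNCTION   : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Enable HDR+ mode. If the HDR+ client is not opened yet, it is
 *              opened asynchronously and HDR+ mode is enabled once it is
 *              opened. Otherwise, HDR+ streams are configured and ZSL HDR+
 *              mode is enabled on the client.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/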
status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
{
    status_t res;

    if (mHdrPlusModeEnabled) {
        return OK;
    }

    // Check if gHdrPlusClient is opened or being opened.
    if (gHdrPlusClient == nullptr) {
        if (gHdrPlusClientOpening) {
            // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
            return OK;
        }

        res = openHdrPlusClientAsyncLocked();
        if (res != OK) {
            ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // When opening HDR+ client completes, HDR+ mode will be enabled.
        return OK;
    }

    // Configure streams for HDR+.
    res = configureHdrPlusStreamsLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
    res = gHdrPlusClient->setZslHdrPlusMode(true);
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    mHdrPlusModeEnabled = true;
    ALOGD("%s: HDR+ mode enabled", __FUNCTION__);

    return OK;
}

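/*===========================================================================
 * FUNCTION   : disableHdrPlusModeLocked
 *
 * DESCRIPTION: Disable HDR+ mode and close the HDR+ client so Easel can
 *              enter low power mode.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/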
void QCamera3HardwareInterface::disableHdrPlusModeLocked()
{
    // Disable HDR+ mode.
    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
        status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
        if (res != OK) {
            ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        // Close HDR+ client so Easel can enter low power mode.
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
    }

    mHdrPlusModeEnabled = false;
    gHdrPlusClientOpening = false;
    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
}

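/*===========================================================================
 * FUNCTION   : configureHdrPlusStreamsLocked
 *
 * DESCRIPTION: Configure HDR+ client streams: the RAW input comes either from
 *              the HAL's HDR+ RAW source channel or directly from the sensor
 *              over MIPI, and the YUV output goes to the picture channel.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/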
status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
{
    pbcamera::InputConfiguration inputConfig;
    std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
    status_t res = OK;

    // Configure HDR+ client streams.
    // Get input config.
    if (mHdrPlusRawSrcChannel) {
        // HDR+ input buffers will be provided by HAL.
        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        inputConfig.isSensorInput = false;
    } else {
        // Sensor MIPI will send data to Easel.
        inputConfig.isSensorInput = true;
        inputConfig.sensorMode.cameraId = mCameraId;
        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
        if (mSensorModeInfo.num_raw_bits != 10) {
            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
                    mSensorModeInfo.num_raw_bits);
            return BAD_VALUE;
        }

        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
    }

    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV output buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);

            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

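/*===========================================================================
 * FUNCTION   : onEaselFatalError
 *
 * DESCRIPTION: Easel manager client callback invoked when Easel encounters a
 *              fatal error. Moves the HAL to the error state and notifies the
 *              framework of the device error.
 *
 * PARAMETERS : @errMsg: error message describing the fatal error
 *
 * RETURN     : None
 *==========================================================================*/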
void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
{
    ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

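/*===========================================================================
 * FUNCTION   : onOpened
 *
 * DESCRIPTION: HDR+ client callback invoked when the HDR+ client has been
 *              opened. Stores the client, sets its static metadata, and
 *              enables HDR+ mode.
 *
 * PARAMETERS : @client: opened HDR+ client
 *
 * RETURN     : None
 *==========================================================================*/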
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode.", __FUNCTION__);
    }
}

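/*===========================================================================
 * FUNCTION   : onOpenFailed
 *
 * DESCRIPTION: HDR+ client callback invoked when opening the HDR+ client
 *              failed.
 *
 * PARAMETERS : @err: error code of the failure
 *
 * RETURN     : None
 *==========================================================================*/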
void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

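/*===========================================================================
 * FUNCTION   : onFatalError
 *
 * DESCRIPTION: HDR+ client callback invoked on a fatal HDR+ client error.
 *              Moves the HAL to the error state and notifies the framework
 *              of the device error.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/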
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

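/*===========================================================================
 * FUNCTION   : onCaptureResult
 *
 * DESCRIPTION: HDR+ client callback invoked when an HDR+ capture result is
 *              ready. Updates the result metadata with the original request's
 *              settings, returns the YUV buffer to the pic channel for JPEG
 *              encoding, dispatches the shutter, and sends the result
 *              metadata to the framework.
 *
 * PARAMETERS : @result: HDR+ capture result
 *              @resultMetadata: result metadata of the ZSL buffer used for
 *                               this capture
 *
 * RETURN     : None
 *==========================================================================*/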
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Find the timestamp
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(updatedResultMetadata,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res != OK) {
            ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                    __FUNCTION__, result->requestId, strerror(-res), res);
        } else {
            mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

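/*===========================================================================
 * FUNCTION   : onFailedCaptureResult
 *
 * DESCRIPTION: HDR+ client callback invoked when an HDR+ capture request
 *              failed. Returns the YUV buffer to the pic channel and reports
 *              buffer errors to the framework for the pending buffers of the
 *              failed request.
 *
 * PARAMETERS : @failedResult: failed HDR+ capture result
 *
 * RETURN     : None
 *==========================================================================*/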
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);

        // Return the buffer to pic channel.
        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
        picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

        mHdrPlusPendingRequests.erase(pendingRequest);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

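// ShutterDispatcher dispatches shutter notifications to the framework in frame
// number order. A shutter for a frame is sent only after the shutters for all
// earlier frames of the same kind (regular or reprocess) have been sent.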
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

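/*===========================================================================
 * FUNCTION   : markShutterReady
 *
 * DESCRIPTION: Mark the shutter for a frame as ready and send out all ready
 *              shutters, in frame number order, up to the first one that is
 *              not ready yet.
 *
 * PARAMETERS : @frameNumber: frame number of the ready shutter
 *              @timestamp: shutter (sensor) timestamp
 *
 * RETURN     : None
 *==========================================================================*/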
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

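// OutputBufferDispatcher dispatches output buffers to the framework in frame
// number order, per stream. A buffer for a frame is returned only after the
// buffers for all earlier frames of the same stream have been returned.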
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

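/*===========================================================================
 * FUNCTION   : markBufferReady
 *
 * DESCRIPTION: Mark a stream buffer for a frame as ready and send out all
 *              ready buffers of that stream, in frame number order, up to
 *              the first one that is not ready yet.
 *
 * PARAMETERS : @frameNumber: frame number of the ready buffer
 *              @buffer: the ready stream buffer
 *
 * RETURN     : None
 *==========================================================================*/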
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result with the ready buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera