/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detecting missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 10
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
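// Editorial usage note (not part of the original HAL source): METADATA_MAP_SIZE
// yields the compile-time entry count of the fixed lookup tables defined below,
// e.g. METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to 9 for the effect-mode table.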

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * The order in this list also matters: when mapping from HAL to Android values the
 * lookup traverses from lower to higher index, so for a HAL value that maps to
 * several Android values the first match found is selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
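
// Editorial sketch (assumption, not in the original source): these QCameraMap
// tables are assumed to be searched linearly in index order by small lookup
// helpers, which is why the first matching entry wins when a HAL value appears
// more than once above. A helper along these lines is what the traversal
// comment refers to:
//
//     template <class mapType, typename halType>
//     int lookupFwkName(const mapType *arr, size_t len, halType hal_name) {
//         for (size_t i = 0; i < len; i++) {
//             if (arr[i].hal_name == hal_name)
//                 return arr[i].fwk_name;   // first match in table order wins
//         }
//         return NAME_NOT_FOUND;            // no mapping for this HAL value
//     }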

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};
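
// Editorial note: the camera framework reaches this HAL through these
// camera3_device_ops entry points via mCameraDevice.ops, e.g.
// device->ops->process_capture_request(device, request) lands in
// QCamera3HardwareInterface::process_capture_request().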

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
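
// Worked example (editorial): with ts = { .tv_sec = 12, .tv_nsec = 345678901 },
// the expression above yields now = 12 * 1000 + 345678901 / 1000000 = 12345 ms
// of CLOCK_BOOTTIME uptime.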

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configurations are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec, the depth cloud size should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1323
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001324/*===========================================================================
1325 * FUNCTION : validateUsageFlags
1326 *
1327 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1328 *
1329 * PARAMETERS :
1330 * @stream_list : streams to be configured
1331 *
1332 * RETURN :
1333 * NO_ERROR if the usage flags are supported
1334 * error code if usage flags are not supported
1335 *
1336 *==========================================================================*/
1337int QCamera3HardwareInterface::validateUsageFlags(
1338 const camera3_stream_configuration_t* streamList)
1339{
1340 for (size_t j = 0; j < streamList->num_streams; j++) {
1341 const camera3_stream_t *newStream = streamList->streams[j];
1342
1343 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1344 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1345 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1346 continue;
1347 }
1348
Jason Leec4cf5032017-05-24 18:31:41 -07001349 // Here we only care whether it's EIS3 or not
1350 char is_type_value[PROPERTY_VALUE_MAX];
1351 property_get("persist.camera.is_type", is_type_value, "4");
1352 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1353 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1354 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1355 isType = IS_TYPE_NONE;
1356
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001357 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1358 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1359 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1360 bool forcePreviewUBWC = true;
1361 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1362 forcePreviewUBWC = false;
1363 }
1364 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001365 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001366 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001367 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001369 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370
1371 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1372 // So color spaces will always match.
1373
1374 // Check whether underlying formats of shared streams match.
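// Example (hypothetical formats): if a stream carrying both preview and
// video usage bits would default to a UBWC format as video but a linear
// format as preview, the checks below reject the configuration, since a
// single stream can only be allocated with one internal format.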
1375 if (isVideo && isPreview && videoFormat != previewFormat) {
1376 LOGE("Combined video and preview usage flag is not supported");
1377 return -EINVAL;
1378 }
1379 if (isPreview && isZSL && previewFormat != zslFormat) {
1380 LOGE("Combined preview and zsl usage flag is not supported");
1381 return -EINVAL;
1382 }
1383 if (isVideo && isZSL && videoFormat != zslFormat) {
1384 LOGE("Combined video and zsl usage flag is not supported");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
1391/*===========================================================================
1392 * FUNCTION : validateUsageFlagsForEis
1393 *
1394 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 * NO_ERROR if the usage flags are supported
1401 * error code if usage flags are not supported
1402 *
1403 *==========================================================================*/
1404int QCamera3HardwareInterface::validateUsageFlagsForEis(
1405 const camera3_stream_configuration_t* streamList)
1406{
1407 for (size_t j = 0; j < streamList->num_streams; j++) {
1408 const camera3_stream_t *newStream = streamList->streams[j];
1409
1410 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1411 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1412
1413 // Because EIS is "hard-coded" for certain use cases, and the current
1414 // implementation doesn't support sharing preview and video on the same
1415 // stream, return failure if EIS is forced on.
1416 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1417 LOGE("Combined video and preview usage flag is not supported due to EIS");
1418 return -EINVAL;
1419 }
1420 }
1421 return NO_ERROR;
1422}
1423
Thierry Strudel3d639192016-09-09 11:52:26 -07001424/*==============================================================================
1425 * FUNCTION : isSupportChannelNeeded
1426 *
1427 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
1428 *
1429 * PARAMETERS :
1430 * @stream_list : streams to be configured
1431 * @stream_config_info : the config info for streams to be configured
1432 *
1433 * RETURN : Boolean true/false decision
1434 *
1435 *==========================================================================*/
1436bool QCamera3HardwareInterface::isSupportChannelNeeded(
1437 camera3_stream_configuration_t *streamList,
1438 cam_stream_size_info_t stream_config_info)
1439{
1440 uint32_t i;
1441 bool pprocRequested = false;
1442 /* Check for conditions where the PProc pipeline does not have any streams */
1443 for (i = 0; i < stream_config_info.num_streams; i++) {
1444 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1445 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1446 pprocRequested = true;
1447 break;
1448 }
1449 }
1450
1451 if (pprocRequested == false )
1452 return true;
1453
1454 /* Dummy stream needed if only raw or jpeg streams present */
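// Example: a configuration containing only a RAW16 stream and a BLOB
// (JPEG) stream matches only the cases below, so the function returns
// true and a dummy support stream will be added for the PProc pipeline.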
1455 for (i = 0; i < streamList->num_streams; i++) {
1456 switch(streamList->streams[i]->format) {
1457 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1458 case HAL_PIXEL_FORMAT_RAW10:
1459 case HAL_PIXEL_FORMAT_RAW16:
1460 case HAL_PIXEL_FORMAT_BLOB:
1461 break;
1462 default:
1463 return false;
1464 }
1465 }
1466 return true;
1467}
1468
1469/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 *
1474 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001475 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001476 *
1477 * RETURN : int32_t type of status
1478 * NO_ERROR -- success
1479 * non-zero failure code
1480 *
1481 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001483{
1484 int32_t rc = NO_ERROR;
1485
1486 cam_dimension_t max_dim = {0, 0};
1487 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1488 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1489 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1490 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1491 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1492 }
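// Example (hypothetical streams): 1920x1080 and 1440x1440 streams yield
// max_dim = 1920x1440, i.e. the per-axis maximum rather than the size of
// any single stream.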
1493
1494 clear_metadata_buffer(mParameters);
1495
1496 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1497 max_dim);
1498 if (rc != NO_ERROR) {
1499 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1500 return rc;
1501 }
1502
1503 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1504 if (rc != NO_ERROR) {
1505 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1506 return rc;
1507 }
1508
1509 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001510 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511
1512 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1513 mParameters);
1514 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001515 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001516 return rc;
1517 }
1518
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001519 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001520 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1521 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1522 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1523 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1524 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001525
1526 return rc;
1527}
1528
1529/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001530 * FUNCTION : getCurrentSensorModeInfo
1531 *
1532 * DESCRIPTION: Get sensor mode information that is currently selected.
1533 *
1534 * PARAMETERS :
1535 * @sensorModeInfo : sensor mode information (output)
1536 *
1537 * RETURN : int32_t type of status
1538 * NO_ERROR -- success
1539 * non-zero failure code
1540 *
1541 *==========================================================================*/
1542int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1543{
1544 int32_t rc = NO_ERROR;
1545
1546 clear_metadata_buffer(mParameters);
1547 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1548
1549 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1550 mParameters);
1551 if (rc != NO_ERROR) {
1552 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1553 return rc;
1554 }
1555
1556 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1557 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1558 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1559 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1560 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1561 sensorModeInfo.num_raw_bits);
1562
1563 return rc;
1564}
1565
1566/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001567 * FUNCTION : addToPPFeatureMask
1568 *
1569 * DESCRIPTION: add additional features to pp feature mask based on
1570 * stream type and usecase
1571 *
1572 * PARAMETERS :
1573 * @stream_format : stream type for feature mask
1574 * @stream_idx : stream idx within postprocess_mask list to change
1575 *
1576 * RETURN : None
1577 *
1578 *==========================================================================*/
1579void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1580 uint32_t stream_idx)
1581{
1582 char feature_mask_value[PROPERTY_VALUE_MAX];
1583 cam_feature_mask_t feature_mask;
1584 int args_converted;
1585 int property_len;
1586
1587 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001588#ifdef _LE_CAMERA_
1589 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1590 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1591 property_len = property_get("persist.camera.hal3.feature",
1592 feature_mask_value, swtnr_feature_mask_value);
1593#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 property_len = property_get("persist.camera.hal3.feature",
1595 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001596#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1598 (feature_mask_value[1] == 'x')) {
1599 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1600 } else {
1601 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1602 }
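// Example (hypothetical values): "0x1000" takes the hex path above, while
// "4096" is parsed as decimal; both yield the same feature_mask bits.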
1603 if (1 != args_converted) {
1604 feature_mask = 0;
1605 LOGE("Wrong feature mask %s", feature_mask_value);
1606 return;
1607 }
1608
1609 switch (stream_format) {
1610 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1611 /* Add SW TNR or LLVD SeeMore to the pp feature mask only if the video hint is enabled */
1612 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1613 mStreamConfigInfo.postprocess_mask[stream_idx]
1614 |= CAM_QTI_FEATURE_SW_TNR;
1615 LOGH("Added SW TNR to pp feature mask");
1616 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1617 mStreamConfigInfo.postprocess_mask[stream_idx]
1618 |= CAM_QCOM_FEATURE_LLVD;
1619 LOGH("Added LLVD SeeMore to pp feature mask");
1620 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001621 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1622 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1623 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1624 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001625 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1626 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1627 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1628 CAM_QTI_FEATURE_BINNING_CORRECTION;
1629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 break;
1631 }
1632 default:
1633 break;
1634 }
1635 LOGD("PP feature mask %llx",
1636 mStreamConfigInfo.postprocess_mask[stream_idx]);
1637}
1638
1639/*==============================================================================
1640 * FUNCTION : updateFpsInPreviewBuffer
1641 *
1642 * DESCRIPTION: update FPS information in preview buffer.
1643 *
1644 * PARAMETERS :
1645 * @metadata : pointer to metadata buffer
1646 * @frame_number: frame_number to look for in pending buffer list
1647 *
1648 * RETURN : None
1649 *
1650 *==========================================================================*/
1651void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1652 uint32_t frame_number)
1653{
1654 // Mark all pending buffers for this particular request
1655 // with corresponding framerate information
1656 for (List<PendingBuffersInRequest>::iterator req =
1657 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1658 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1659 for(List<PendingBufferInfo>::iterator j =
1660 req->mPendingBufferList.begin();
1661 j != req->mPendingBufferList.end(); j++) {
1662 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1663 if ((req->frame_number == frame_number) &&
1664 (channel->getStreamTypeMask() &
1665 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1666 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1667 CAM_INTF_PARM_FPS_RANGE, metadata) {
1668 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1669 struct private_handle_t *priv_handle =
1670 (struct private_handle_t *)(*(j->buffer));
1671 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
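// Example: a request whose FPS range tops out at 30 writes a refresh
// rate of 30 into the preview buffer's private handle metadata for the
// display to consume.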
1672 }
1673 }
1674 }
1675 }
1676}
1677
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001678/*==============================================================================
1679 * FUNCTION : updateTimeStampInPendingBuffers
1680 *
1681 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1682 * of a frame number
1683 *
1684 * PARAMETERS :
1685 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1686 * @timestamp : timestamp to be set
1687 *
1688 * RETURN : None
1689 *
1690 *==========================================================================*/
1691void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1692 uint32_t frameNumber, nsecs_t timestamp)
1693{
1694 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1695 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1696 if (req->frame_number != frameNumber)
1697 continue;
1698
1699 for (auto k = req->mPendingBufferList.begin();
1700 k != req->mPendingBufferList.end(); k++ ) {
1701 struct private_handle_t *priv_handle =
1702 (struct private_handle_t *) (*(k->buffer));
1703 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1704 }
1705 }
1706 return;
1707}
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709/*===========================================================================
1710 * FUNCTION : configureStreams
1711 *
1712 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1713 * and output streams.
1714 *
1715 * PARAMETERS :
1716 * @stream_list : streams to be configured
1717 *
1718 * RETURN :
1719 *
1720 *==========================================================================*/
1721int QCamera3HardwareInterface::configureStreams(
1722 camera3_stream_configuration_t *streamList)
1723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001724 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001725 int rc = 0;
1726
1727 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001728 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001729 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001730 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001731
1732 return rc;
1733}
1734
1735/*===========================================================================
1736 * FUNCTION : configureStreamsPerfLocked
1737 *
1738 * DESCRIPTION: configureStreams while perfLock is held.
1739 *
1740 * PARAMETERS :
1741 * @stream_list : streams to be configured
1742 *
1743 * RETURN : int32_t type of status
1744 * NO_ERROR -- success
1745 * non-zero failure code
1746 *==========================================================================*/
1747int QCamera3HardwareInterface::configureStreamsPerfLocked(
1748 camera3_stream_configuration_t *streamList)
1749{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001750 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001751 int rc = 0;
1752
1753 // Sanity check stream_list
1754 if (streamList == NULL) {
1755 LOGE("NULL stream configuration");
1756 return BAD_VALUE;
1757 }
1758 if (streamList->streams == NULL) {
1759 LOGE("NULL stream list");
1760 return BAD_VALUE;
1761 }
1762
1763 if (streamList->num_streams < 1) {
1764 LOGE("Bad number of streams requested: %d",
1765 streamList->num_streams);
1766 return BAD_VALUE;
1767 }
1768
1769 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1770 LOGE("Maximum number of streams %d exceeded: %d",
1771 MAX_NUM_STREAMS, streamList->num_streams);
1772 return BAD_VALUE;
1773 }
1774
Jason Leec4cf5032017-05-24 18:31:41 -07001775 mOpMode = streamList->operation_mode;
1776 LOGD("mOpMode: %d", mOpMode);
1777
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 rc = validateUsageFlags(streamList);
1779 if (rc != NO_ERROR) {
1780 return rc;
1781 }
1782
Thierry Strudel3d639192016-09-09 11:52:26 -07001783 /* first invalidate all the streams in mStreamInfo;
1784 * if they appear again, they will be validated */
1785 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1786 it != mStreamInfo.end(); it++) {
1787 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1788 if (channel) {
1789 channel->stop();
1790 }
1791 (*it)->status = INVALID;
1792 }
1793
1794 if (mRawDumpChannel) {
1795 mRawDumpChannel->stop();
1796 delete mRawDumpChannel;
1797 mRawDumpChannel = NULL;
1798 }
1799
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001800 if (mHdrPlusRawSrcChannel) {
1801 mHdrPlusRawSrcChannel->stop();
1802 delete mHdrPlusRawSrcChannel;
1803 mHdrPlusRawSrcChannel = NULL;
1804 }
1805
Thierry Strudel3d639192016-09-09 11:52:26 -07001806 if (mSupportChannel)
1807 mSupportChannel->stop();
1808
1809 if (mAnalysisChannel) {
1810 mAnalysisChannel->stop();
1811 }
1812 if (mMetadataChannel) {
1813 /* If mStreamInfo is not empty, there is a metadata stream */
1814 mMetadataChannel->stop();
1815 }
1816 if (mChannelHandle) {
1817 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001818 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001819 LOGD("stopping channel %d", mChannelHandle);
1820 }
1821
1822 pthread_mutex_lock(&mMutex);
1823
1824 // Check state
1825 switch (mState) {
1826 case INITIALIZED:
1827 case CONFIGURED:
1828 case STARTED:
1829 /* valid state */
1830 break;
1831 default:
1832 LOGE("Invalid state %d", mState);
1833 pthread_mutex_unlock(&mMutex);
1834 return -ENODEV;
1835 }
1836
1837 /* Check whether we have video stream */
1838 m_bIs4KVideo = false;
1839 m_bIsVideo = false;
1840 m_bEisSupportedSize = false;
1841 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001842 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001843 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001844 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001845 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 uint32_t videoWidth = 0U;
1847 uint32_t videoHeight = 0U;
1848 size_t rawStreamCnt = 0;
1849 size_t stallStreamCnt = 0;
1850 size_t processedStreamCnt = 0;
1851 // Number of streams on ISP encoder path
1852 size_t numStreamsOnEncoder = 0;
1853 size_t numYuv888OnEncoder = 0;
1854 bool bYuv888OverrideJpeg = false;
1855 cam_dimension_t largeYuv888Size = {0, 0};
1856 cam_dimension_t maxViewfinderSize = {0, 0};
1857 bool bJpegExceeds4K = false;
1858 bool bJpegOnEncoder = false;
1859 bool bUseCommonFeatureMask = false;
1860 cam_feature_mask_t commonFeatureMask = 0;
1861 bool bSmallJpegSize = false;
1862 uint32_t width_ratio;
1863 uint32_t height_ratio;
1864 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1865 camera3_stream_t *inputStream = NULL;
1866 bool isJpeg = false;
1867 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001868 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001869 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001870
1871 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1872
1873 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 uint8_t eis_prop_set;
1875 uint32_t maxEisWidth = 0;
1876 uint32_t maxEisHeight = 0;
1877
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001878 // Initialize all instant AEC related variables
1879 mInstantAEC = false;
1880 mResetInstantAEC = false;
1881 mInstantAECSettledFrameNumber = 0;
1882 mAecSkipDisplayFrameBound = 0;
1883 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001884 mCurrFeatureState = 0;
1885 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001886
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1888
1889 size_t count = IS_TYPE_MAX;
1890 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1891 for (size_t i = 0; i < count; i++) {
1892 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001893 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1894 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 break;
1896 }
1897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001898
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001899 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 maxEisWidth = MAX_EIS_WIDTH;
1901 maxEisHeight = MAX_EIS_HEIGHT;
1902 }
1903
1904 /* EIS setprop control */
1905 char eis_prop[PROPERTY_VALUE_MAX];
1906 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001907 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 eis_prop_set = (uint8_t)atoi(eis_prop);
1909
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001910 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001911 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
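// Example: with persist.camera.eis.enable left at its default of "1" on a
// device that advertises EIS 2.0/3.0 support, EIS remains enabled here
// unless the session is constrained high-speed; it may still be disabled
// below for front cameras or configurations without a video stream.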
1912
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001913 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1914 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001915
Thierry Strudel3d639192016-09-09 11:52:26 -07001916 /* stream configurations */
1917 for (size_t i = 0; i < streamList->num_streams; i++) {
1918 camera3_stream_t *newStream = streamList->streams[i];
1919 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1920 "height = %d, rotation = %d, usage = 0x%x",
1921 i, newStream->stream_type, newStream->format,
1922 newStream->width, newStream->height, newStream->rotation,
1923 newStream->usage);
1924 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1925 newStream->stream_type == CAMERA3_STREAM_INPUT){
1926 isZsl = true;
1927 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001928 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1929 IS_USAGE_PREVIEW(newStream->usage)) {
1930 isPreview = true;
1931 }
1932
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1934 inputStream = newStream;
1935 }
1936
Emilian Peev7650c122017-01-19 08:24:33 -08001937 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1938 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001939 isJpeg = true;
1940 jpegSize.width = newStream->width;
1941 jpegSize.height = newStream->height;
1942 if (newStream->width > VIDEO_4K_WIDTH ||
1943 newStream->height > VIDEO_4K_HEIGHT)
1944 bJpegExceeds4K = true;
1945 }
1946
1947 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1948 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1949 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001950 // In HAL3 we can have multiple different video streams.
1951 // The variables video width and height are used below as
1952 // dimensions of the biggest of them
1953 if (videoWidth < newStream->width ||
1954 videoHeight < newStream->height) {
1955 videoWidth = newStream->width;
1956 videoHeight = newStream->height;
1957 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001958 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1959 (VIDEO_4K_HEIGHT <= newStream->height)) {
1960 m_bIs4KVideo = true;
1961 }
1962 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1963 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001964
Thierry Strudel3d639192016-09-09 11:52:26 -07001965 }
1966 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1967 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1968 switch (newStream->format) {
1969 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001970 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1971 depthPresent = true;
1972 break;
1973 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001974 stallStreamCnt++;
1975 if (isOnEncoder(maxViewfinderSize, newStream->width,
1976 newStream->height)) {
1977 numStreamsOnEncoder++;
1978 bJpegOnEncoder = true;
1979 }
1980 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1981 newStream->width);
1982 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1983 newStream->height);
1984 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1985 "FATAL: max_downscale_factor cannot be zero and so assert");
1986 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1987 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1988 LOGH("Setting small jpeg size flag to true");
1989 bSmallJpegSize = true;
1990 }
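// Example (hypothetical sensor): a 4032-wide active array with a
// 320-wide JPEG stream gives width_ratio = ceil(4032 / 320) = 13; if
// that exceeds max_downscale_factor, the small-JPEG flag set here later
// selects the postprocess superset for the BLOB stream (non-ZSL case).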
1991 break;
1992 case HAL_PIXEL_FORMAT_RAW10:
1993 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1994 case HAL_PIXEL_FORMAT_RAW16:
1995 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001996 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1997 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1998 pdStatCount++;
1999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002000 break;
2001 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2002 processedStreamCnt++;
2003 if (isOnEncoder(maxViewfinderSize, newStream->width,
2004 newStream->height)) {
2005 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2006 !IS_USAGE_ZSL(newStream->usage)) {
2007 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2008 }
2009 numStreamsOnEncoder++;
2010 }
2011 break;
2012 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2013 processedStreamCnt++;
2014 if (isOnEncoder(maxViewfinderSize, newStream->width,
2015 newStream->height)) {
2016 // If Yuv888 size is not greater than 4K, set feature mask
2017 // to SUPERSET so that it support concurrent request on
2018 // YUV and JPEG.
2019 if (newStream->width <= VIDEO_4K_WIDTH &&
2020 newStream->height <= VIDEO_4K_HEIGHT) {
2021 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2022 }
2023 numStreamsOnEncoder++;
2024 numYuv888OnEncoder++;
2025 largeYuv888Size.width = newStream->width;
2026 largeYuv888Size.height = newStream->height;
2027 }
2028 break;
2029 default:
2030 processedStreamCnt++;
2031 if (isOnEncoder(maxViewfinderSize, newStream->width,
2032 newStream->height)) {
2033 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 numStreamsOnEncoder++;
2035 }
2036 break;
2037 }
2038
2039 }
2040 }
2041
2042 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2043 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2044 !m_bIsVideo) {
2045 m_bEisEnable = false;
2046 }
2047
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002048 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2049 pthread_mutex_unlock(&mMutex);
2050 return -EINVAL;
2051 }
2052
Thierry Strudel54dc9782017-02-15 12:12:10 -08002053 uint8_t forceEnableTnr = 0;
2054 char tnr_prop[PROPERTY_VALUE_MAX];
2055 memset(tnr_prop, 0, sizeof(tnr_prop));
2056 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2057 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2058
Thierry Strudel3d639192016-09-09 11:52:26 -07002059 /* Logic to enable/disable TNR based on specific config size/etc.*/
2060 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2062 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002063 else if (forceEnableTnr)
2064 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002065
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002066 char videoHdrProp[PROPERTY_VALUE_MAX];
2067 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2068 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2069 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2070
2071 if (hdr_mode_prop == 1 && m_bIsVideo &&
2072 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2073 m_bVideoHdrEnabled = true;
2074 else
2075 m_bVideoHdrEnabled = false;
2076
2077
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 /* Check if num_streams is sane */
2079 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2080 rawStreamCnt > MAX_RAW_STREAMS ||
2081 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2082 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2083 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2084 pthread_mutex_unlock(&mMutex);
2085 return -EINVAL;
2086 }
2087 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002088 if (isZsl && m_bIs4KVideo) {
2089 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 pthread_mutex_unlock(&mMutex);
2091 return -EINVAL;
2092 }
2093 /* Check if stream sizes are sane */
2094 if (numStreamsOnEncoder > 2) {
2095 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2096 pthread_mutex_unlock(&mMutex);
2097 return -EINVAL;
2098 } else if (1 < numStreamsOnEncoder){
2099 bUseCommonFeatureMask = true;
2100 LOGH("Multiple streams above max viewfinder size, common mask needed");
2101 }
2102
2103 /* Check if BLOB size is greater than 4k in 4k recording case */
2104 if (m_bIs4KVideo && bJpegExceeds4K) {
2105 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2106 pthread_mutex_unlock(&mMutex);
2107 return -EINVAL;
2108 }
2109
Emilian Peev7650c122017-01-19 08:24:33 -08002110 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2111 depthPresent) {
2112 LOGE("HAL doesn't support depth streams in HFR mode!");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 }
2116
Thierry Strudel3d639192016-09-09 11:52:26 -07002117 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2118 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2119 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2120 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2121 // configurations:
2122 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2123 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2124 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2125 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2126 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2127 __func__);
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131
2132 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2133 // the YUV stream's size is greater or equal to the JPEG size, set common
2134 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2135 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2136 jpegSize.width, jpegSize.height) &&
2137 largeYuv888Size.width > jpegSize.width &&
2138 largeYuv888Size.height > jpegSize.height) {
2139 bYuv888OverrideJpeg = true;
2140 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2141 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2142 }
2143
2144 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2145 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2146 commonFeatureMask);
2147 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2148 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2149
2150 rc = validateStreamDimensions(streamList);
2151 if (rc == NO_ERROR) {
2152 rc = validateStreamRotations(streamList);
2153 }
2154 if (rc != NO_ERROR) {
2155 LOGE("Invalid stream configuration requested!");
2156 pthread_mutex_unlock(&mMutex);
2157 return rc;
2158 }
2159
Emilian Peev0f3c3162017-03-15 12:57:46 +00002160 if (1 < pdStatCount) {
2161 LOGE("HAL doesn't support multiple PD streams");
2162 pthread_mutex_unlock(&mMutex);
2163 return -EINVAL;
2164 }
2165
2166 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2167 (1 == pdStatCount)) {
2168 LOGE("HAL doesn't support PD streams in HFR mode!");
2169 pthread_mutex_unlock(&mMutex);
2170 return -EINVAL;
2171 }
2172
Thierry Strudel3d639192016-09-09 11:52:26 -07002173 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2174 for (size_t i = 0; i < streamList->num_streams; i++) {
2175 camera3_stream_t *newStream = streamList->streams[i];
2176 LOGH("newStream type = %d, stream format = %d "
2177 "stream size : %d x %d, stream rotation = %d",
2178 newStream->stream_type, newStream->format,
2179 newStream->width, newStream->height, newStream->rotation);
2180 //if the stream is in the mStreamList validate it
2181 bool stream_exists = false;
2182 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2183 it != mStreamInfo.end(); it++) {
2184 if ((*it)->stream == newStream) {
2185 QCamera3ProcessingChannel *channel =
2186 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2187 stream_exists = true;
2188 if (channel)
2189 delete channel;
2190 (*it)->status = VALID;
2191 (*it)->stream->priv = NULL;
2192 (*it)->channel = NULL;
2193 }
2194 }
2195 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2196 //new stream
2197 stream_info_t* stream_info;
2198 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2199 if (!stream_info) {
2200 LOGE("Could not allocate stream info");
2201 rc = -ENOMEM;
2202 pthread_mutex_unlock(&mMutex);
2203 return rc;
2204 }
2205 stream_info->stream = newStream;
2206 stream_info->status = VALID;
2207 stream_info->channel = NULL;
2208 mStreamInfo.push_back(stream_info);
2209 }
2210 /* Covers Opaque ZSL and API1 F/W ZSL */
2211 if (IS_USAGE_ZSL(newStream->usage)
2212 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2213 if (zslStream != NULL) {
2214 LOGE("Multiple input/reprocess streams requested!");
2215 pthread_mutex_unlock(&mMutex);
2216 return BAD_VALUE;
2217 }
2218 zslStream = newStream;
2219 }
2220 /* Covers YUV reprocess */
2221 if (inputStream != NULL) {
2222 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2223 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2225 && inputStream->width == newStream->width
2226 && inputStream->height == newStream->height) {
2227 if (zslStream != NULL) {
2228 /* This scenario indicates multiple YUV streams with the same size
2229 * as the input stream have been requested. Since the ZSL stream handle
2230 * is solely used to override the size of streams which share h/w
2231 * streams, we just make a guess here as to which of the streams is the
2232 * ZSL stream. This will be refactored once we have generic logic for
2233 * streams sharing encoder output.
2234 */
2235 LOGH("Warning, Multiple ip/reprocess streams requested!");
2236 }
2237 zslStream = newStream;
2238 }
2239 }
2240 }
2241
2242 /* If a zsl stream is set, we know that we have configured at least one input or
2243 bidirectional stream */
2244 if (NULL != zslStream) {
2245 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2246 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2247 mInputStreamInfo.format = zslStream->format;
2248 mInputStreamInfo.usage = zslStream->usage;
2249 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2250 mInputStreamInfo.dim.width,
2251 mInputStreamInfo.dim.height,
2252 mInputStreamInfo.format, mInputStreamInfo.usage);
2253 }
2254
2255 cleanAndSortStreamInfo();
2256 if (mMetadataChannel) {
2257 delete mMetadataChannel;
2258 mMetadataChannel = NULL;
2259 }
2260 if (mSupportChannel) {
2261 delete mSupportChannel;
2262 mSupportChannel = NULL;
2263 }
2264
2265 if (mAnalysisChannel) {
2266 delete mAnalysisChannel;
2267 mAnalysisChannel = NULL;
2268 }
2269
2270 if (mDummyBatchChannel) {
2271 delete mDummyBatchChannel;
2272 mDummyBatchChannel = NULL;
2273 }
2274
Emilian Peev7650c122017-01-19 08:24:33 -08002275 if (mDepthChannel) {
2276 mDepthChannel = NULL;
2277 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002278 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002279
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002280 mShutterDispatcher.clear();
2281 mOutputBufferDispatcher.clear();
2282
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 char is_type_value[PROPERTY_VALUE_MAX];
2284 property_get("persist.camera.is_type", is_type_value, "4");
2285 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2286
Binhao Line406f062017-05-03 14:39:44 -07002287 char property_value[PROPERTY_VALUE_MAX];
2288 property_get("persist.camera.gzoom.at", property_value, "0");
2289 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002290 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2291 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2292 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2293 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
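// Example: persist.camera.gzoom.at=3 sets both bits, enabling the zoom
// feature for the video stream (bit 0) and preview streams (bit 1), but
// only when the camera position is CAM_POSITION_BACK.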
Binhao Line406f062017-05-03 14:39:44 -07002294
2295 property_get("persist.camera.gzoom.4k", property_value, "0");
2296 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2297
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 //Create metadata channel and initialize it
2299 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2300 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2301 gCamCapability[mCameraId]->color_arrangement);
2302 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2303 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002304 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 if (mMetadataChannel == NULL) {
2306 LOGE("failed to allocate metadata channel");
2307 rc = -ENOMEM;
2308 pthread_mutex_unlock(&mMutex);
2309 return rc;
2310 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002311 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002312 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2313 if (rc < 0) {
2314 LOGE("metadata channel initialization failed");
2315 delete mMetadataChannel;
2316 mMetadataChannel = NULL;
2317 pthread_mutex_unlock(&mMutex);
2318 return rc;
2319 }
2320
Thierry Strudel2896d122017-02-23 19:18:03 -08002321 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002323 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002324 // Keep track of preview/video streams indices.
2325 // There could be more than one preview streams, but only one video stream.
2326 int32_t video_stream_idx = -1;
2327 int32_t preview_stream_idx[streamList->num_streams];
2328 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002329 bool previewTnr[streamList->num_streams];
2330 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2331 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2332 // Loop through once to determine preview TNR conditions before creating channels.
2333 for (size_t i = 0; i < streamList->num_streams; i++) {
2334 camera3_stream_t *newStream = streamList->streams[i];
2335 uint32_t stream_usage = newStream->usage;
2336 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2337 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2338 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2339 video_stream_idx = (int32_t)i;
2340 else
2341 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2342 }
2343 }
2344 // By default, preview stream TNR is disabled.
2345 // Enable TNR for a preview stream if all of the conditions below are satisfied:
2346 // 1. preview resolution == video resolution.
2347 // 2. video stream TNR is enabled.
2348 // 3. EIS 2.0 is selected, or this is the front camera (which wouldn't use EIS3 even if it's set).
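// Example: a hypothetical 1920x1080 preview paired with a 1920x1080
// video stream, with TNR enabled for video and either EIS 2.0 selected
// or the front camera in use, marks that preview stream for TNR in the
// loop below.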
2349 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2350 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2351 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2352 if (m_bTnrEnabled && m_bTnrVideo &&
2353 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2354 video_stream->width == preview_stream->width &&
2355 video_stream->height == preview_stream->height) {
2356 previewTnr[preview_stream_idx[i]] = true;
2357 }
2358 }
2359
Thierry Strudel3d639192016-09-09 11:52:26 -07002360 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2361 /* Allocate channel objects for the requested streams */
2362 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002363
Thierry Strudel3d639192016-09-09 11:52:26 -07002364 camera3_stream_t *newStream = streamList->streams[i];
2365 uint32_t stream_usage = newStream->usage;
2366 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2367 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2368 struct camera_info *p_info = NULL;
2369 pthread_mutex_lock(&gCamLock);
2370 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2371 pthread_mutex_unlock(&gCamLock);
2372 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2373 || IS_USAGE_ZSL(newStream->usage)) &&
2374 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002375 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002376 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002377 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2378 if (bUseCommonFeatureMask)
2379 zsl_ppmask = commonFeatureMask;
2380 else
2381 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002382 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002383 if (numStreamsOnEncoder > 0)
2384 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2385 else
2386 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 LOGH("Input stream configured, reprocess config");
2392 } else {
2393 //for non zsl streams find out the format
2394 switch (newStream->format) {
2395 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2396 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002397 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2399 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2400 /* add additional features to pp feature mask */
2401 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2402 mStreamConfigInfo.num_streams);
2403
2404 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2405 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2406 CAM_STREAM_TYPE_VIDEO;
2407 if (m_bTnrEnabled && m_bTnrVideo) {
2408 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2409 CAM_QCOM_FEATURE_CPP_TNR;
2410 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2412 ~CAM_QCOM_FEATURE_CDS;
2413 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2416 CAM_QTI_FEATURE_PPEISCORE;
2417 }
Binhao Line406f062017-05-03 14:39:44 -07002418 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2419 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2420 CAM_QCOM_FEATURE_GOOG_ZOOM;
2421 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002422 } else {
2423 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2424 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002425 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2427 CAM_QCOM_FEATURE_CPP_TNR;
2428 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2430 ~CAM_QCOM_FEATURE_CDS;
2431 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002432 if(!m_bSwTnrPreview) {
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2434 ~CAM_QTI_FEATURE_SW_TNR;
2435 }
Binhao Line406f062017-05-03 14:39:44 -07002436 if (is_goog_zoom_preview_enabled) {
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2438 CAM_QCOM_FEATURE_GOOG_ZOOM;
2439 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002440 padding_info.width_padding = mSurfaceStridePadding;
2441 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002442 previewSize.width = (int32_t)newStream->width;
2443 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002444 }
2445 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2446 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2447 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2448 newStream->height;
2449 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2450 newStream->width;
2451 }
2452 }
2453 break;
2454 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002455 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002456 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2457 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2458 if (bUseCommonFeatureMask)
2459 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2460 commonFeatureMask;
2461 else
2462 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2463 CAM_QCOM_FEATURE_NONE;
2464 } else {
2465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2466 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2467 }
2468 break;
2469 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002470 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2472 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2473 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2474 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2475 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002476 /* Remove rotation if it is not supported
2477 for 4K LiveVideo snapshot case (online processing) */
2478 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2479 CAM_QCOM_FEATURE_ROTATION)) {
2480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2481 &= ~CAM_QCOM_FEATURE_ROTATION;
2482 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 } else {
2484 if (bUseCommonFeatureMask &&
2485 isOnEncoder(maxViewfinderSize, newStream->width,
2486 newStream->height)) {
2487 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2488 } else {
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2490 }
2491 }
2492 if (isZsl) {
2493 if (zslStream) {
2494 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2495 (int32_t)zslStream->width;
2496 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2497 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 } else {
2501 LOGE("Error, No ZSL stream identified");
2502 pthread_mutex_unlock(&mMutex);
2503 return -EINVAL;
2504 }
2505 } else if (m_bIs4KVideo) {
2506 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2507 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2508 } else if (bYuv888OverrideJpeg) {
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2510 (int32_t)largeYuv888Size.width;
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2512 (int32_t)largeYuv888Size.height;
2513 }
2514 break;
2515 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2516 case HAL_PIXEL_FORMAT_RAW16:
2517 case HAL_PIXEL_FORMAT_RAW10:
2518 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2520 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002521 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2522 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2523 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2524 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2525 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2526 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2527 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2528 gCamCapability[mCameraId]->dt[mPDIndex];
2529 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2530 gCamCapability[mCameraId]->vc[mPDIndex];
2531 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002532 break;
2533 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002534 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002535 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2536 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2537 break;
2538 }
2539 }
2540
2541 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2542 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2543 gCamCapability[mCameraId]->color_arrangement);
2544
2545 if (newStream->priv == NULL) {
2546 //New stream, construct channel
2547 switch (newStream->stream_type) {
2548 case CAMERA3_STREAM_INPUT:
2549 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2550 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2551 break;
2552 case CAMERA3_STREAM_BIDIRECTIONAL:
2553 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2554 GRALLOC_USAGE_HW_CAMERA_WRITE;
2555 break;
2556 case CAMERA3_STREAM_OUTPUT:
2557 /* For the video encoding stream, set the read/write rarely
2558 * flags so that its buffers may be allocated un-cached */
2559 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2560 newStream->usage |=
2561 (GRALLOC_USAGE_SW_READ_RARELY |
2562 GRALLOC_USAGE_SW_WRITE_RARELY |
2563 GRALLOC_USAGE_HW_CAMERA_WRITE);
2564 else if (IS_USAGE_ZSL(newStream->usage))
2565 {
2566 LOGD("ZSL usage flag skipping");
2567 }
2568 else if (newStream == zslStream
2569 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2570 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2571 } else
2572 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2573 break;
2574 default:
2575 LOGE("Invalid stream_type %d", newStream->stream_type);
2576 break;
2577 }
2578
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002579 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002580 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2581 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2582 QCamera3ProcessingChannel *channel = NULL;
2583 switch (newStream->format) {
2584 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2585 if ((newStream->usage &
2586 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2587 (streamList->operation_mode ==
2588 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2589 ) {
2590 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2591 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002592 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 this,
2594 newStream,
2595 (cam_stream_type_t)
2596 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2597 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2598 mMetadataChannel,
2599 0); //heap buffers are not required for HFR video channel
2600 if (channel == NULL) {
2601 LOGE("allocation of channel failed");
2602 pthread_mutex_unlock(&mMutex);
2603 return -ENOMEM;
2604 }
2605 //channel->getNumBuffers() will return 0 here, so use
2606 //MAX_INFLIGHT_HFR_REQUESTS
2607 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2608 newStream->priv = channel;
2609 LOGI("num video buffers in HFR mode: %d",
2610 MAX_INFLIGHT_HFR_REQUESTS);
2611 } else {
2612 /* Copy stream contents in the HFR preview-only case to create a
2613 * dummy batch channel so that sensor streaming is in
2614 * HFR mode */
2615 if (!m_bIsVideo && (streamList->operation_mode ==
2616 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2617 mDummyBatchStream = *newStream;
2618 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002619 int bufferCount = MAX_INFLIGHT_REQUESTS;
2620 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2621 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002622 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2623 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2624 bufferCount = m_bIs4KVideo ?
2625 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2626 }
2627
Thierry Strudel2896d122017-02-23 19:18:03 -08002628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002638 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002644 /* disable UBWC for preview, though supported,
2645 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002646 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002647 (previewSize.width == (int32_t)videoWidth)&&
2648 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002649 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002650 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002651 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002652 /* When goog_zoom is linked to the preview or video stream,
2653 * disable ubwc to the linked stream */
2654 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2655 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2656 channel->setUBWCEnabled(false);
2657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002658 newStream->max_buffers = channel->getNumBuffers();
2659 newStream->priv = channel;
2660 }
2661 break;
2662 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2663 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2664 mChannelHandle,
2665 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002666 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002667 this,
2668 newStream,
2669 (cam_stream_type_t)
2670 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2671 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2672 mMetadataChannel);
2673 if (channel == NULL) {
2674 LOGE("allocation of YUV channel failed");
2675 pthread_mutex_unlock(&mMutex);
2676 return -ENOMEM;
2677 }
2678 newStream->max_buffers = channel->getNumBuffers();
2679 newStream->priv = channel;
2680 break;
2681 }
2682 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2683 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002684 case HAL_PIXEL_FORMAT_RAW10: {
2685 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2686 (HAL_DATASPACE_DEPTH != newStream->data_space))
2687 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002688 mRawChannel = new QCamera3RawChannel(
2689 mCameraHandle->camera_handle, mChannelHandle,
2690 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002691 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002692 this, newStream,
2693 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002694 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002695 if (mRawChannel == NULL) {
2696 LOGE("allocation of raw channel failed");
2697 pthread_mutex_unlock(&mMutex);
2698 return -ENOMEM;
2699 }
2700 newStream->max_buffers = mRawChannel->getNumBuffers();
2701 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2702 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002703 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002704 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002705 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2706 mDepthChannel = new QCamera3DepthChannel(
2707 mCameraHandle->camera_handle, mChannelHandle,
2708 mCameraHandle->ops, NULL, NULL, &padding_info,
2709 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2710 mMetadataChannel);
2711 if (NULL == mDepthChannel) {
2712 LOGE("Allocation of depth channel failed");
2713 pthread_mutex_unlock(&mMutex);
2714 return NO_MEMORY;
2715 }
2716 newStream->priv = mDepthChannel;
2717 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2718 } else {
2719 // Max live snapshot inflight buffer is 1. This is to mitigate
2720 // frame drop issues for video snapshot. The more buffers being
2721 // allocated, the more frame drops there are.
2722 mPictureChannel = new QCamera3PicChannel(
2723 mCameraHandle->camera_handle, mChannelHandle,
2724 mCameraHandle->ops, captureResultCb,
2725 setBufferErrorStatus, &padding_info, this, newStream,
2726 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2727 m_bIs4KVideo, isZsl, mMetadataChannel,
2728 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2729 if (mPictureChannel == NULL) {
2730 LOGE("allocation of channel failed");
2731 pthread_mutex_unlock(&mMutex);
2732 return -ENOMEM;
2733 }
2734 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2735 newStream->max_buffers = mPictureChannel->getNumBuffers();
2736 mPictureChannel->overrideYuvSize(
2737 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2738 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002739 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002740 break;
2741
2742 default:
2743 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002744 pthread_mutex_unlock(&mMutex);
2745 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002746 }
2747 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2748 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2749 } else {
2750 LOGE("Error, Unknown stream type");
2751 pthread_mutex_unlock(&mMutex);
2752 return -EINVAL;
2753 }
2754
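            // When the stream's default camera format resolves to NV12 UBWC (checked below),
            // the private UBWC gralloc usage flag is added so that the allocated buffers
            // match the compressed output format.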
2755 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002756 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002757 // Here we only care whether it's EIS3 or not
2758 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2759 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2760 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2761 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002762 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002763 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002764 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002765 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2766 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2767 }
2768 }
2769
2770 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2771 it != mStreamInfo.end(); it++) {
2772 if ((*it)->stream == newStream) {
2773 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2774 break;
2775 }
2776 }
2777 } else {
2778 // Channel already exists for this stream
2779 // Do nothing for now
2780 }
2781 padding_info = gCamCapability[mCameraId]->padding_info;
2782
Emilian Peev7650c122017-01-19 08:24:33 -08002783        /* Do not add entries for input & depth streams in meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002784         * since there is no real stream associated with them
2785 */
Emilian Peev7650c122017-01-19 08:24:33 -08002786 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002787 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2788 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002789 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002790 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 }
2792
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002793 // Let buffer dispatcher know the configured streams.
2794 mOutputBufferDispatcher.configureStreams(streamList);
2795
Thierry Strudel2896d122017-02-23 19:18:03 -08002796 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2797 onlyRaw = false;
2798 }
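    // Note: onlyRaw stays true only for the vendor RAW-only configuration with no
    // non-RAW streams; in that case the internal analysis and callback support
    // channels below are skipped.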
2799
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002800 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002801 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002802 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002803 cam_analysis_info_t analysisInfo;
2804 int32_t ret = NO_ERROR;
2805 ret = mCommon.getAnalysisInfo(
2806 FALSE,
2807 analysisFeatureMask,
2808 &analysisInfo);
2809 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002810 cam_color_filter_arrangement_t analysis_color_arrangement =
2811 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2812 CAM_FILTER_ARRANGEMENT_Y :
2813 gCamCapability[mCameraId]->color_arrangement);
2814 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2815 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002816 cam_dimension_t analysisDim;
2817 analysisDim = mCommon.getMatchingDimension(previewSize,
2818 analysisInfo.analysis_recommended_res);
2819
2820 mAnalysisChannel = new QCamera3SupportChannel(
2821 mCameraHandle->camera_handle,
2822 mChannelHandle,
2823 mCameraHandle->ops,
2824 &analysisInfo.analysis_padding_info,
2825 analysisFeatureMask,
2826 CAM_STREAM_TYPE_ANALYSIS,
2827 &analysisDim,
2828 (analysisInfo.analysis_format
2829 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2830 : CAM_FORMAT_YUV_420_NV21),
2831 analysisInfo.hw_analysis_supported,
2832 gCamCapability[mCameraId]->color_arrangement,
2833 this,
2834 0); // force buffer count to 0
2835 } else {
2836 LOGW("getAnalysisInfo failed, ret = %d", ret);
2837 }
2838 if (!mAnalysisChannel) {
2839 LOGW("Analysis channel cannot be created");
2840 }
2841 }
2842
Thierry Strudel3d639192016-09-09 11:52:26 -07002843 //RAW DUMP channel
2844 if (mEnableRawDump && isRawStreamRequested == false){
2845 cam_dimension_t rawDumpSize;
2846 rawDumpSize = getMaxRawSize(mCameraId);
2847 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2848 setPAAFSupport(rawDumpFeatureMask,
2849 CAM_STREAM_TYPE_RAW,
2850 gCamCapability[mCameraId]->color_arrangement);
2851 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2852 mChannelHandle,
2853 mCameraHandle->ops,
2854 rawDumpSize,
2855 &padding_info,
2856 this, rawDumpFeatureMask);
2857 if (!mRawDumpChannel) {
2858 LOGE("Raw Dump channel cannot be created");
2859 pthread_mutex_unlock(&mMutex);
2860 return -ENOMEM;
2861 }
2862 }
2863
Thierry Strudel3d639192016-09-09 11:52:26 -07002864 if (mAnalysisChannel) {
2865 cam_analysis_info_t analysisInfo;
2866 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2867 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2868 CAM_STREAM_TYPE_ANALYSIS;
2869 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2870 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002871 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002872 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2873 &analysisInfo);
2874 if (rc != NO_ERROR) {
2875 LOGE("getAnalysisInfo failed, ret = %d", rc);
2876 pthread_mutex_unlock(&mMutex);
2877 return rc;
2878 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002879 cam_color_filter_arrangement_t analysis_color_arrangement =
2880 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2881 CAM_FILTER_ARRANGEMENT_Y :
2882 gCamCapability[mCameraId]->color_arrangement);
2883 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2884 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2885 analysis_color_arrangement);
2886
Thierry Strudel3d639192016-09-09 11:52:26 -07002887 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002888 mCommon.getMatchingDimension(previewSize,
2889 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002890 mStreamConfigInfo.num_streams++;
2891 }
2892
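    // When the configured streams alone do not satisfy the backend requirements
    // (as reported by isSupportChannelNeeded), an internal CALLBACK support channel
    // is created as a dummy stream; it is added to mStreamConfigInfo below but is
    // never exposed to the framework.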
Thierry Strudel2896d122017-02-23 19:18:03 -08002893 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002894 cam_analysis_info_t supportInfo;
2895 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2896 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2897 setPAAFSupport(callbackFeatureMask,
2898 CAM_STREAM_TYPE_CALLBACK,
2899 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002900 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002901 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002902 if (ret != NO_ERROR) {
2903 /* Ignore the error for Mono camera
2904 * because the PAAF bit mask is only set
2905 * for CAM_STREAM_TYPE_ANALYSIS stream type
2906 */
2907 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2908 LOGW("getAnalysisInfo failed, ret = %d", ret);
2909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002910 }
2911 mSupportChannel = new QCamera3SupportChannel(
2912 mCameraHandle->camera_handle,
2913 mChannelHandle,
2914 mCameraHandle->ops,
2915 &gCamCapability[mCameraId]->padding_info,
2916 callbackFeatureMask,
2917 CAM_STREAM_TYPE_CALLBACK,
2918 &QCamera3SupportChannel::kDim,
2919 CAM_FORMAT_YUV_420_NV21,
2920 supportInfo.hw_analysis_supported,
2921 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002922 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002923 if (!mSupportChannel) {
2924 LOGE("dummy channel cannot be created");
2925 pthread_mutex_unlock(&mMutex);
2926 return -ENOMEM;
2927 }
2928 }
2929
2930 if (mSupportChannel) {
2931 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2932 QCamera3SupportChannel::kDim;
2933 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2934 CAM_STREAM_TYPE_CALLBACK;
2935 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2936 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2937 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2938 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2939 gCamCapability[mCameraId]->color_arrangement);
2940 mStreamConfigInfo.num_streams++;
2941 }
2942
2943 if (mRawDumpChannel) {
2944 cam_dimension_t rawSize;
2945 rawSize = getMaxRawSize(mCameraId);
2946 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2947 rawSize;
2948 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2949 CAM_STREAM_TYPE_RAW;
2950 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2951 CAM_QCOM_FEATURE_NONE;
2952 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2953 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2954 gCamCapability[mCameraId]->color_arrangement);
2955 mStreamConfigInfo.num_streams++;
2956 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002957
2958 if (mHdrPlusRawSrcChannel) {
2959 cam_dimension_t rawSize;
2960 rawSize = getMaxRawSize(mCameraId);
2961 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2962 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2963 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2964 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2965 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2966 gCamCapability[mCameraId]->color_arrangement);
2967 mStreamConfigInfo.num_streams++;
2968 }
2969
Thierry Strudel3d639192016-09-09 11:52:26 -07002970    /* In HFR mode, if a video stream is not added, create a dummy channel so that
2971     * the ISP can operate in batch mode even for the preview-only case. This channel is
2972 * never 'start'ed (no stream-on), it is only 'initialized' */
2973 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2974 !m_bIsVideo) {
2975 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(dummyFeatureMask,
2977 CAM_STREAM_TYPE_VIDEO,
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2980 mChannelHandle,
2981 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002982 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002983 this,
2984 &mDummyBatchStream,
2985 CAM_STREAM_TYPE_VIDEO,
2986 dummyFeatureMask,
2987 mMetadataChannel);
2988 if (NULL == mDummyBatchChannel) {
2989            LOGE("creation of mDummyBatchChannel failed. "
2990                    "Preview will use non-HFR sensor mode");
2991 }
2992 }
2993 if (mDummyBatchChannel) {
2994 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2995 mDummyBatchStream.width;
2996 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2997 mDummyBatchStream.height;
2998 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2999 CAM_STREAM_TYPE_VIDEO;
3000 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3001 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3002 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3003 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3004 gCamCapability[mCameraId]->color_arrangement);
3005 mStreamConfigInfo.num_streams++;
3006 }
3007
3008 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3009 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003010 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003011 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
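    // i.e. 4K video leaves max_buffers at 0, EIS3 video uses MAX_VIDEO_BUFFERS,
    // and all other configurations use MAX_INFLIGHT_REQUESTS.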
Thierry Strudel3d639192016-09-09 11:52:26 -07003012
3013 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3014 for (pendingRequestIterator i = mPendingRequestsList.begin();
3015 i != mPendingRequestsList.end();) {
3016 i = erasePendingRequest(i);
3017 }
3018 mPendingFrameDropList.clear();
3019 // Initialize/Reset the pending buffers list
3020 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3021 req.mPendingBufferList.clear();
3022 }
3023 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3024
Thierry Strudel3d639192016-09-09 11:52:26 -07003025 mCurJpegMeta.clear();
3026 //Get min frame duration for this streams configuration
3027 deriveMinFrameDuration();
3028
Chien-Yu Chenee335912017-02-09 17:53:20 -08003029 mFirstPreviewIntentSeen = false;
3030
3031    // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003032 {
3033 Mutex::Autolock l(gHdrPlusClientLock);
3034 disableHdrPlusModeLocked();
3035 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003036
Thierry Strudel3d639192016-09-09 11:52:26 -07003037 // Update state
3038 mState = CONFIGURED;
3039
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003040 mFirstMetadataCallback = true;
3041
Thierry Strudel3d639192016-09-09 11:52:26 -07003042 pthread_mutex_unlock(&mMutex);
3043
3044 return rc;
3045}
3046
3047/*===========================================================================
3048 * FUNCTION : validateCaptureRequest
3049 *
3050 * DESCRIPTION: validate a capture request from camera service
3051 *
3052 * PARAMETERS :
3053 * @request : request from framework to process
3054 *
3055 * RETURN     : NO_ERROR on success
3056 *              BAD_VALUE if the request or any of its buffers is invalid
3057 *==========================================================================*/
3058int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003059 camera3_capture_request_t *request,
3060 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003061{
3062 ssize_t idx = 0;
3063 const camera3_stream_buffer_t *b;
3064 CameraMetadata meta;
3065
3066 /* Sanity check the request */
3067 if (request == NULL) {
3068 LOGE("NULL capture request");
3069 return BAD_VALUE;
3070 }
3071
3072 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3073 /*settings cannot be null for the first request*/
3074 return BAD_VALUE;
3075 }
3076
3077 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003078 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3079 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003080        LOGE("%s: Request %d: No output buffers provided!",
3081 __FUNCTION__, frameNumber);
3082 return BAD_VALUE;
3083 }
3084 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3085        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3086 request->num_output_buffers, MAX_NUM_STREAMS);
3087 return BAD_VALUE;
3088 }
3089 if (request->input_buffer != NULL) {
3090 b = request->input_buffer;
3091 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3092 LOGE("Request %d: Buffer %ld: Status not OK!",
3093 frameNumber, (long)idx);
3094 return BAD_VALUE;
3095 }
3096 if (b->release_fence != -1) {
3097 LOGE("Request %d: Buffer %ld: Has a release fence!",
3098 frameNumber, (long)idx);
3099 return BAD_VALUE;
3100 }
3101 if (b->buffer == NULL) {
3102 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3103 frameNumber, (long)idx);
3104 return BAD_VALUE;
3105 }
3106 }
3107
3108 // Validate all buffers
3109 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003110 if (b == NULL) {
3111 return BAD_VALUE;
3112 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003114 QCamera3ProcessingChannel *channel =
3115 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3116 if (channel == NULL) {
3117 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3118 frameNumber, (long)idx);
3119 return BAD_VALUE;
3120 }
3121 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3122 LOGE("Request %d: Buffer %ld: Status not OK!",
3123 frameNumber, (long)idx);
3124 return BAD_VALUE;
3125 }
3126 if (b->release_fence != -1) {
3127 LOGE("Request %d: Buffer %ld: Has a release fence!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->buffer == NULL) {
3132 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (*(b->buffer) == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL private handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 idx++;
3142 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003143 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003144 return NO_ERROR;
3145}
3146
3147/*===========================================================================
3148 * FUNCTION : deriveMinFrameDuration
3149 *
3150 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3151 * on currently configured streams.
3152 *
3153 * PARAMETERS : NONE
3154 *
3155 * RETURN : NONE
3156 *
3157 *==========================================================================*/
3158void QCamera3HardwareInterface::deriveMinFrameDuration()
3159{
3160 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003161 bool hasRaw = false;
3162
3163 mMinRawFrameDuration = 0;
3164 mMinJpegFrameDuration = 0;
3165 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003166
3167 maxJpegDim = 0;
3168 maxProcessedDim = 0;
3169 maxRawDim = 0;
3170
3171 // Figure out maximum jpeg, processed, and raw dimensions
3172 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3173 it != mStreamInfo.end(); it++) {
3174
3175 // Input stream doesn't have valid stream_type
3176 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3177 continue;
3178
3179 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3180 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3181 if (dimension > maxJpegDim)
3182 maxJpegDim = dimension;
3183 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3184 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3185 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003186 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003187 if (dimension > maxRawDim)
3188 maxRawDim = dimension;
3189 } else {
3190 if (dimension > maxProcessedDim)
3191 maxProcessedDim = dimension;
3192 }
3193 }
3194
3195 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3196 MAX_SIZES_CNT);
3197
3198 //Assume all jpeg dimensions are in processed dimensions.
3199 if (maxJpegDim > maxProcessedDim)
3200 maxProcessedDim = maxJpegDim;
3201    //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003202 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003203 maxRawDim = INT32_MAX;
3204
3205 for (size_t i = 0; i < count; i++) {
3206 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3207 gCamCapability[mCameraId]->raw_dim[i].height;
3208 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3209 maxRawDim = dimension;
3210 }
3211 }
3212
3213 //Find minimum durations for processed, jpeg, and raw
3214 for (size_t i = 0; i < count; i++) {
3215 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3216 gCamCapability[mCameraId]->raw_dim[i].height) {
3217 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3218 break;
3219 }
3220 }
3221 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3222 for (size_t i = 0; i < count; i++) {
3223 if (maxProcessedDim ==
3224 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3225 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3226 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3227 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3228 break;
3229 }
3230 }
3231}
3232
3233/*===========================================================================
3234 * FUNCTION : getMinFrameDuration
3235 *
3236 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
3237 *              and current request configuration.
3238 *
3239 * PARAMETERS : @request: request sent by the framework
3240 *
3241 * RETURN     : min frame duration for a particular request
3242 *
3243 *==========================================================================*/
3244int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3245{
3246 bool hasJpegStream = false;
3247 bool hasRawStream = false;
3248 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3249 const camera3_stream_t *stream = request->output_buffers[i].stream;
3250 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3251 hasJpegStream = true;
3252 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3253 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3254 stream->format == HAL_PIXEL_FORMAT_RAW16)
3255 hasRawStream = true;
3256 }
3257
3258 if (!hasJpegStream)
3259 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3260 else
3261 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3262}
3263
3264/*===========================================================================
3265 * FUNCTION : handleBuffersDuringFlushLock
3266 *
3267 * DESCRIPTION: Account for buffers returned from back-end during flush
3268 * This function is executed while mMutex is held by the caller.
3269 *
3270 * PARAMETERS :
3271 * @buffer: image buffer for the callback
3272 *
3273 * RETURN :
3274 *==========================================================================*/
3275void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3276{
3277 bool buffer_found = false;
3278 for (List<PendingBuffersInRequest>::iterator req =
3279 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3280 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3281 for (List<PendingBufferInfo>::iterator i =
3282 req->mPendingBufferList.begin();
3283 i != req->mPendingBufferList.end(); i++) {
3284 if (i->buffer == buffer->buffer) {
3285 mPendingBuffersMap.numPendingBufsAtFlush--;
3286 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3287 buffer->buffer, req->frame_number,
3288 mPendingBuffersMap.numPendingBufsAtFlush);
3289 buffer_found = true;
3290 break;
3291 }
3292 }
3293 if (buffer_found) {
3294 break;
3295 }
3296 }
3297 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3298 //signal the flush()
3299 LOGD("All buffers returned to HAL. Continue flush");
3300 pthread_cond_signal(&mBuffersCond);
3301 }
3302}
3303
Thierry Strudel3d639192016-09-09 11:52:26 -07003304/*===========================================================================
3305 * FUNCTION : handleBatchMetadata
3306 *
3307 * DESCRIPTION: Handles metadata buffer callback in batch mode
3308 *
3309 * PARAMETERS : @metadata_buf: metadata buffer
3310 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3311 * the meta buf in this method
3312 *
3313 * RETURN :
3314 *
3315 *==========================================================================*/
3316void QCamera3HardwareInterface::handleBatchMetadata(
3317 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3318{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003319 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003320
3321 if (NULL == metadata_buf) {
3322 LOGE("metadata_buf is NULL");
3323 return;
3324 }
3325    /* In batch mode, the metadata will contain the frame number and timestamp of
3326 * the last frame in the batch. Eg: a batch containing buffers from request
3327 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3328 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
3329 * multiple process_capture_results */
3330 metadata_buffer_t *metadata =
3331 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3332 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3333 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3334 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3335 uint32_t frame_number = 0, urgent_frame_number = 0;
3336 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3337 bool invalid_metadata = false;
3338 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3339 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003340 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003341
3342 int32_t *p_frame_number_valid =
3343 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3344 uint32_t *p_frame_number =
3345 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3346 int64_t *p_capture_time =
3347 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3348 int32_t *p_urgent_frame_number_valid =
3349 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3350 uint32_t *p_urgent_frame_number =
3351 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3352
3353 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3354 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3355 (NULL == p_urgent_frame_number)) {
3356 LOGE("Invalid metadata");
3357 invalid_metadata = true;
3358 } else {
3359 frame_number_valid = *p_frame_number_valid;
3360 last_frame_number = *p_frame_number;
3361 last_frame_capture_time = *p_capture_time;
3362 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3363 last_urgent_frame_number = *p_urgent_frame_number;
3364 }
3365
3366    /* In batch mode, when no video buffers are requested, set_parms are sent
3367 * for every capture_request. The difference between consecutive urgent
3368 * frame numbers and frame numbers should be used to interpolate the
3369 * corresponding frame numbers and time stamps */
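    /* Illustrative example (assuming a batch of 4): if the batch metadata reports
     * last_frame_number = 8 and the first frame of the batch was 5, then
     * frameNumDiff = 8 + 1 - 5 = 4 and the loop below emits four interpolated
     * results (frames 5..8), spacing the inferred timestamps by
     * NSEC_PER_SEC / mHFRVideoFps. */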
3370 pthread_mutex_lock(&mMutex);
3371 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003372 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3373 if(idx < 0) {
3374 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3375 last_urgent_frame_number);
3376 mState = ERROR;
3377 pthread_mutex_unlock(&mMutex);
3378 return;
3379 }
3380 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3382 first_urgent_frame_number;
3383
3384 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3385 urgent_frame_number_valid,
3386 first_urgent_frame_number, last_urgent_frame_number);
3387 }
3388
3389 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003390 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3391 if(idx < 0) {
3392 LOGE("Invalid frame number received: %d. Irrecoverable error",
3393 last_frame_number);
3394 mState = ERROR;
3395 pthread_mutex_unlock(&mMutex);
3396 return;
3397 }
3398 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003399 frameNumDiff = last_frame_number + 1 -
3400 first_frame_number;
3401 mPendingBatchMap.removeItem(last_frame_number);
3402
3403 LOGD("frm: valid: %d frm_num: %d - %d",
3404 frame_number_valid,
3405 first_frame_number, last_frame_number);
3406
3407 }
3408 pthread_mutex_unlock(&mMutex);
3409
3410 if (urgent_frame_number_valid || frame_number_valid) {
3411 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3412 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3413 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3414 urgentFrameNumDiff, last_urgent_frame_number);
3415 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3416 LOGE("frameNumDiff: %d frameNum: %d",
3417 frameNumDiff, last_frame_number);
3418 }
3419
3420 for (size_t i = 0; i < loopCount; i++) {
3421 /* handleMetadataWithLock is called even for invalid_metadata for
3422 * pipeline depth calculation */
3423 if (!invalid_metadata) {
3424 /* Infer frame number. Batch metadata contains frame number of the
3425 * last frame */
3426 if (urgent_frame_number_valid) {
3427 if (i < urgentFrameNumDiff) {
3428 urgent_frame_number =
3429 first_urgent_frame_number + i;
3430 LOGD("inferred urgent frame_number: %d",
3431 urgent_frame_number);
3432 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3433 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3434 } else {
3435 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3436 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3437 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3438 }
3439 }
3440
3441 /* Infer frame number. Batch metadata contains frame number of the
3442 * last frame */
3443 if (frame_number_valid) {
3444 if (i < frameNumDiff) {
3445 frame_number = first_frame_number + i;
3446 LOGD("inferred frame_number: %d", frame_number);
3447 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3448 CAM_INTF_META_FRAME_NUMBER, frame_number);
3449 } else {
3450 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3451 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3452 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3453 }
3454 }
3455
3456 if (last_frame_capture_time) {
3457 //Infer timestamp
3458 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003459 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003460 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003461 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3463 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3464 LOGD("batch capture_time: %lld, capture_time: %lld",
3465 last_frame_capture_time, capture_time);
3466 }
3467 }
3468 pthread_mutex_lock(&mMutex);
3469 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003470 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003471 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3472 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003473                &is_metabuf_queued /* whether metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003474 pthread_mutex_unlock(&mMutex);
3475 }
3476
3477 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003478 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003479 mMetadataChannel->bufDone(metadata_buf);
3480 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003481 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003482 }
3483}
3484
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003485void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3486 camera3_error_msg_code_t errorCode)
3487{
3488 camera3_notify_msg_t notify_msg;
3489 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3490 notify_msg.type = CAMERA3_MSG_ERROR;
3491 notify_msg.message.error.error_code = errorCode;
3492 notify_msg.message.error.error_stream = NULL;
3493 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003494 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003495
3496 return;
3497}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003498
3499/*===========================================================================
3500 * FUNCTION : sendPartialMetadataWithLock
3501 *
3502 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3503 *
3504 * PARAMETERS : @metadata: metadata buffer
3505 * @requestIter: The iterator for the pending capture request for
3506 *                 which the partial result is being sent
3507 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3508 * last urgent metadata in a batch. Always true for non-batch mode
3509 *
3510 * RETURN :
3511 *
3512 *==========================================================================*/
3513
3514void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3515 metadata_buffer_t *metadata,
3516 const pendingRequestIterator requestIter,
3517 bool lastUrgentMetadataInBatch)
3518{
3519 camera3_capture_result_t result;
3520 memset(&result, 0, sizeof(camera3_capture_result_t));
3521
3522 requestIter->partial_result_cnt++;
3523
3524 // Extract 3A metadata
3525 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003526 metadata, lastUrgentMetadataInBatch, requestIter->frame_number);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003527 // Populate metadata result
3528 result.frame_number = requestIter->frame_number;
3529 result.num_output_buffers = 0;
3530 result.output_buffers = NULL;
3531 result.partial_result = requestIter->partial_result_cnt;
3532
3533 {
3534 Mutex::Autolock l(gHdrPlusClientLock);
3535 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3536 // Notify HDR+ client about the partial metadata.
3537 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3538 result.partial_result == PARTIAL_RESULT_COUNT);
3539 }
3540 }
3541
3542 orchestrateResult(&result);
3543 LOGD("urgent frame_number = %u", result.frame_number);
3544 free_camera_metadata((camera_metadata_t *)result.result);
3545}
3546
Thierry Strudel3d639192016-09-09 11:52:26 -07003547/*===========================================================================
3548 * FUNCTION : handleMetadataWithLock
3549 *
3550 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3551 *
3552 * PARAMETERS : @metadata_buf: metadata buffer
3553 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3554 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003555 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3556 * last urgent metadata in a batch. Always true for non-batch mode
3557 * @lastMetadataInBatch: Boolean to indicate whether this is the
3558 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003559 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3560 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003561 *
3562 * RETURN :
3563 *
3564 *==========================================================================*/
3565void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003566 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003567 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3568 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003569{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003570 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003571 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3572 //during flush do not send metadata from this thread
3573 LOGD("not sending metadata during flush or when mState is error");
3574 if (free_and_bufdone_meta_buf) {
3575 mMetadataChannel->bufDone(metadata_buf);
3576 free(metadata_buf);
3577 }
3578 return;
3579 }
3580
3581 //not in flush
3582 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3583 int32_t frame_number_valid, urgent_frame_number_valid;
3584 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003585 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003586 nsecs_t currentSysTime;
3587
3588 int32_t *p_frame_number_valid =
3589 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3590 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3591 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003592 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 int32_t *p_urgent_frame_number_valid =
3594 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3595 uint32_t *p_urgent_frame_number =
3596 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3597 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3598 metadata) {
3599 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3600 *p_frame_number_valid, *p_frame_number);
3601 }
3602
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003603 camera_metadata_t *resultMetadata = nullptr;
3604
Thierry Strudel3d639192016-09-09 11:52:26 -07003605 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3606 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3607 LOGE("Invalid metadata");
3608 if (free_and_bufdone_meta_buf) {
3609 mMetadataChannel->bufDone(metadata_buf);
3610 free(metadata_buf);
3611 }
3612 goto done_metadata;
3613 }
3614 frame_number_valid = *p_frame_number_valid;
3615 frame_number = *p_frame_number;
3616 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003617 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003618 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3619 urgent_frame_number = *p_urgent_frame_number;
3620 currentSysTime = systemTime(CLOCK_MONOTONIC);
3621
Jason Lee603176d2017-05-31 11:43:27 -07003622 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
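        // This appears to convert the sensor timestamp from the BOOTTIME clock base to
        // the MONOTONIC base: a BOOTTIME read is bracketed between two MONOTONIC reads,
        // the sample with the smallest bracket (bestGap) gives the clock offset, and
        // that offset is subtracted from capture_time.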
3623 const int tries = 3;
3624 nsecs_t bestGap, measured;
3625 for (int i = 0; i < tries; ++i) {
3626 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3627 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3628 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3629 const nsecs_t gap = tmono2 - tmono;
3630 if (i == 0 || gap < bestGap) {
3631 bestGap = gap;
3632 measured = tbase - ((tmono + tmono2) >> 1);
3633 }
3634 }
3635 capture_time -= measured;
3636 }
3637
Thierry Strudel3d639192016-09-09 11:52:26 -07003638 // Detect if buffers from any requests are overdue
3639 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003640 int64_t timeout;
3641 {
3642 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3643 // If there is a pending HDR+ request, the following requests may be blocked until the
3644 // HDR+ request is done. So allow a longer timeout.
3645 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3646 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3647 }
3648
3649 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003650 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003651 assert(missed.stream->priv);
3652 if (missed.stream->priv) {
3653 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3654 assert(ch->mStreams[0]);
3655 if (ch->mStreams[0]) {
3656 LOGE("Cancel missing frame = %d, buffer = %p,"
3657 "stream type = %d, stream format = %d",
3658 req.frame_number, missed.buffer,
3659 ch->mStreams[0]->getMyType(), missed.stream->format);
3660 ch->timeoutFrame(req.frame_number);
3661 }
3662 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003663 }
3664 }
3665 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003666    //For the very first metadata callback, regardless of whether it contains a valid
3667 //frame number, send the partial metadata for the jumpstarting requests.
3668 //Note that this has to be done even if the metadata doesn't contain valid
3669 //urgent frame number, because in the case only 1 request is ever submitted
3670 //to HAL, there won't be subsequent valid urgent frame number.
3671 if (mFirstMetadataCallback) {
3672 for (pendingRequestIterator i =
3673 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3674 if (i->bUseFirstPartial) {
3675 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3676 }
3677 }
3678 mFirstMetadataCallback = false;
3679 }
3680
Thierry Strudel3d639192016-09-09 11:52:26 -07003681 //Partial result on process_capture_result for timestamp
3682 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003683 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003684
3685        //Received an urgent frame number, handle it
3686 //using partial results
3687 for (pendingRequestIterator i =
3688 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3689 LOGD("Iterator Frame = %d urgent frame = %d",
3690 i->frame_number, urgent_frame_number);
3691
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003692 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 (i->partial_result_cnt == 0)) {
3694 LOGE("Error: HAL missed urgent metadata for frame number %d",
3695 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003696 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003697 }
3698
3699 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003700 i->partial_result_cnt == 0) {
3701 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003702 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3703 // Instant AEC settled for this frame.
3704 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3705 mInstantAECSettledFrameNumber = urgent_frame_number;
3706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003707 break;
3708 }
3709 }
3710 }
3711
3712 if (!frame_number_valid) {
3713 LOGD("Not a valid normal frame number, used as SOF only");
3714 if (free_and_bufdone_meta_buf) {
3715 mMetadataChannel->bufDone(metadata_buf);
3716 free(metadata_buf);
3717 }
3718 goto done_metadata;
3719 }
3720 LOGH("valid frame_number = %u, capture_time = %lld",
3721 frame_number, capture_time);
3722
Emilian Peev4e0fe952017-06-30 12:40:09 -07003723 handleDepthDataLocked(metadata->depth_data, frame_number,
3724 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003725
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003726    // Check whether any stream buffer corresponding to this frame is dropped or not
3727 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3728    // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3729 for (auto & pendingRequest : mPendingRequestsList) {
3730 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3731 mInstantAECSettledFrameNumber)) {
3732 camera3_notify_msg_t notify_msg = {};
3733 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003734 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003735 QCamera3ProcessingChannel *channel =
3736 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003737 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003738 if (p_cam_frame_drop) {
3739 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003740 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003741 // Got the stream ID for drop frame.
3742 dropFrame = true;
3743 break;
3744 }
3745 }
3746 } else {
3747                    // This is the instant AEC case.
3748                    // For instant AEC, drop the stream until AEC is settled.
3749 dropFrame = true;
3750 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003751
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003752 if (dropFrame) {
3753 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3754 if (p_cam_frame_drop) {
3755 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003756 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003758 } else {
3759 // For instant AEC, inform frame drop and frame number
3760 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3761 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003762 pendingRequest.frame_number, streamID,
3763 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 }
3765 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003767 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003768 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003769 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003770 if (p_cam_frame_drop) {
3771 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003772 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003773 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003774 } else {
3775 // For instant AEC, inform frame drop and frame number
3776 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3777 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003778 pendingRequest.frame_number, streamID,
3779 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 }
3781 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003782 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 PendingFrameDrop.stream_ID = streamID;
3784 // Add the Frame drop info to mPendingFrameDropList
3785 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003787 }
3788 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003790
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 for (auto & pendingRequest : mPendingRequestsList) {
3792 // Find the pending request with the frame number.
3793 if (pendingRequest.frame_number == frame_number) {
3794 // Update the sensor timestamp.
3795 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003796
Thierry Strudel3d639192016-09-09 11:52:26 -07003797
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003798 /* Set the timestamp in display metadata so that clients aware of
3799               private_handle, such as VT, can use these unmodified timestamps.
3800               The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003801 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003802
Thierry Strudel3d639192016-09-09 11:52:26 -07003803 // Find channel requiring metadata, meaning internal offline postprocess
3804 // is needed.
3805 //TODO: for now, we don't support two streams requiring metadata at the same time.
3806            // (because we are not making copies, and the metadata buffer is not reference counted.)
3807 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3809 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003810 if (iter->need_metadata) {
3811 internalPproc = true;
3812 QCamera3ProcessingChannel *channel =
3813 (QCamera3ProcessingChannel *)iter->stream->priv;
3814 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003815 if(p_is_metabuf_queued != NULL) {
3816 *p_is_metabuf_queued = true;
3817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003818 break;
3819 }
3820 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003821 for (auto itr = pendingRequest.internalRequestList.begin();
3822 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003823 if (itr->need_metadata) {
3824 internalPproc = true;
3825 QCamera3ProcessingChannel *channel =
3826 (QCamera3ProcessingChannel *)itr->stream->priv;
3827 channel->queueReprocMetadata(metadata_buf);
3828 break;
3829 }
3830 }
3831
Thierry Strudel54dc9782017-02-15 12:12:10 -08003832 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003833
3834 bool *enableZsl = nullptr;
3835 if (gExposeEnableZslKey) {
3836 enableZsl = &pendingRequest.enableZsl;
3837 }
3838
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003840 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003841 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003842
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003843 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003844
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003845 if (pendingRequest.blob_request) {
3846 //Dump tuning metadata if enabled and available
3847 char prop[PROPERTY_VALUE_MAX];
3848 memset(prop, 0, sizeof(prop));
3849 property_get("persist.camera.dumpmetadata", prop, "0");
3850 int32_t enabled = atoi(prop);
3851 if (enabled && metadata->is_tuning_params_valid) {
3852 dumpMetadataToFile(metadata->tuning_params,
3853 mMetaFrameCount,
3854 enabled,
3855 "Snapshot",
3856 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003857 }
3858 }
3859
3860 if (!internalPproc) {
3861 LOGD("couldn't find need_metadata for this metadata");
3862 // Return metadata buffer
3863 if (free_and_bufdone_meta_buf) {
3864 mMetadataChannel->bufDone(metadata_buf);
3865 free(metadata_buf);
3866 }
3867 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003868
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003869 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003870 }
3871 }
3872
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003873 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3874
3875 // Try to send out capture result metadata.
3876 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003877 return;
3878
Thierry Strudel3d639192016-09-09 11:52:26 -07003879done_metadata:
3880 for (pendingRequestIterator i = mPendingRequestsList.begin();
3881 i != mPendingRequestsList.end() ;i++) {
3882 i->pipeline_depth++;
3883 }
3884 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3885 unblockRequestIfNecessary();
3886}
3887
3888/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003889 * FUNCTION : handleDepthDataLocked
3890 *
3891 * DESCRIPTION: Handles incoming depth data
3892 *
3893 * PARAMETERS : @depthData : Depth data
3894 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003895 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003896 *
3897 * RETURN :
3898 *
3899 *==========================================================================*/
3900void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003901 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003902 uint32_t currentFrameNumber;
3903 buffer_handle_t *depthBuffer;
3904
3905 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003906 return;
3907 }
3908
3909 camera3_stream_buffer_t resultBuffer =
3910 {.acquire_fence = -1,
3911 .release_fence = -1,
3912 .status = CAMERA3_BUFFER_STATUS_OK,
3913 .buffer = nullptr,
3914 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003915 do {
3916 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3917 if (nullptr == depthBuffer) {
3918 break;
3919 }
3920
Emilian Peev7650c122017-01-19 08:24:33 -08003921 resultBuffer.buffer = depthBuffer;
3922 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003923 if (valid) {
3924 int32_t rc = mDepthChannel->populateDepthData(depthData,
3925 frameNumber);
3926 if (NO_ERROR != rc) {
3927 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3928 } else {
3929 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3930 }
Emilian Peev7650c122017-01-19 08:24:33 -08003931 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003932 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003933 }
3934 } else if (currentFrameNumber > frameNumber) {
3935 break;
3936 } else {
3937 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3938 {{currentFrameNumber, mDepthChannel->getStream(),
3939 CAMERA3_MSG_ERROR_BUFFER}}};
3940 orchestrateNotify(&notify_msg);
3941
3942 LOGE("Depth buffer for frame number: %d is missing "
3943 "returning back!", currentFrameNumber);
3944 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3945 }
3946 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003947 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003948 } while (currentFrameNumber < frameNumber);
3949}
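/*
 * Illustrative sketch (not part of the QCamera3 implementation): the depth
 * handler above drains the channel's queued buffers in frame-number order,
 * returning anything older than the incoming frame with an error status and
 * filling only the exact match when the incoming data is valid. A minimal
 * standalone version of that drain policy, assuming only the C++ standard
 * library:
 *
 *     #include <cstdint>
 *     #include <functional>
 *     #include <map>
 *
 *     enum class BufStatus { Ok, Error };
 *
 *     // Return every queued buffer whose frame number is <= the incoming one.
 *     // Only the exact match is Ok, and only when the incoming data is valid.
 *     static void drainUpTo(std::map<uint32_t, int> &queued, uint32_t incoming,
 *             bool valid,
 *             const std::function<void(uint32_t, BufStatus)> &returnBuffer) {
 *         for (auto it = queued.begin();
 *                 it != queued.end() && it->first <= incoming; ) {
 *             BufStatus status = (it->first == incoming && valid) ?
 *                     BufStatus::Ok : BufStatus::Error;
 *             returnBuffer(it->first, status);
 *             it = queued.erase(it);
 *         }
 *     }
 */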
3950
3951/*===========================================================================
3952 * FUNCTION : notifyErrorFoPendingDepthData
3953 *
3954 * DESCRIPTION: Returns error for any pending depth buffers
3955 *
                                                   3956 * PARAMETERS : @depthCh : depth channel that needs to get flushed
3957 *
3958 * RETURN :
3959 *
3960 *==========================================================================*/
3961void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3962 QCamera3DepthChannel *depthCh) {
3963 uint32_t currentFrameNumber;
3964 buffer_handle_t *depthBuffer;
3965
3966 if (nullptr == depthCh) {
3967 return;
3968 }
3969
3970 camera3_notify_msg_t notify_msg =
3971 {.type = CAMERA3_MSG_ERROR,
3972 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3973 camera3_stream_buffer_t resultBuffer =
3974 {.acquire_fence = -1,
3975 .release_fence = -1,
3976 .buffer = nullptr,
3977 .stream = depthCh->getStream(),
3978 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003979
3980 while (nullptr !=
3981 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3982 depthCh->unmapBuffer(currentFrameNumber);
3983
3984 notify_msg.message.error.frame_number = currentFrameNumber;
3985 orchestrateNotify(&notify_msg);
3986
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003987 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003988 };
3989}
3990
3991/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003992 * FUNCTION : hdrPlusPerfLock
3993 *
3994 * DESCRIPTION: perf lock for HDR+ using custom intent
3995 *
3996 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3997 *
3998 * RETURN : None
3999 *
4000 *==========================================================================*/
4001void QCamera3HardwareInterface::hdrPlusPerfLock(
4002 mm_camera_super_buf_t *metadata_buf)
4003{
4004 if (NULL == metadata_buf) {
4005 LOGE("metadata_buf is NULL");
4006 return;
4007 }
4008 metadata_buffer_t *metadata =
4009 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4010 int32_t *p_frame_number_valid =
4011 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4012 uint32_t *p_frame_number =
4013 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4014
4015 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4016 LOGE("%s: Invalid metadata", __func__);
4017 return;
4018 }
4019
4020 //acquire perf lock for 5 sec after the last HDR frame is captured
4021 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4022 if ((p_frame_number != NULL) &&
4023 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004024 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004025 }
4026 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004027}
4028
4029/*===========================================================================
4030 * FUNCTION : handleInputBufferWithLock
4031 *
4032 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4033 *
4034 * PARAMETERS : @frame_number: frame number of the input buffer
4035 *
4036 * RETURN :
4037 *
4038 *==========================================================================*/
4039void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4040{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004041 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004042 pendingRequestIterator i = mPendingRequestsList.begin();
4043 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4044 i++;
4045 }
4046 if (i != mPendingRequestsList.end() && i->input_buffer) {
4047 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004048 CameraMetadata settings;
4049 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4050 if(i->settings) {
4051 settings = i->settings;
4052 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4053 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004054 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004055 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004056 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004057 } else {
4058 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004059 }
4060
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004061 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4062 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4063 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004064
4065 camera3_capture_result result;
4066 memset(&result, 0, sizeof(camera3_capture_result));
4067 result.frame_number = frame_number;
4068 result.result = i->settings;
4069 result.input_buffer = i->input_buffer;
4070 result.partial_result = PARTIAL_RESULT_COUNT;
4071
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004072 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004073 LOGD("Input request metadata and input buffer frame_number = %u",
4074 i->frame_number);
4075 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004076
4077 // Dispatch result metadata that may be just unblocked by this reprocess result.
4078 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004079 } else {
4080 LOGE("Could not find input request for frame number %d", frame_number);
4081 }
4082}
4083
4084/*===========================================================================
4085 * FUNCTION : handleBufferWithLock
4086 *
4087 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4088 *
4089 * PARAMETERS : @buffer: image buffer for the callback
4090 * @frame_number: frame number of the image buffer
4091 *
4092 * RETURN :
4093 *
4094 *==========================================================================*/
4095void QCamera3HardwareInterface::handleBufferWithLock(
4096 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4097{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004098 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004099
4100 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4101 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4102 }
4103
Thierry Strudel3d639192016-09-09 11:52:26 -07004104 /* Nothing to be done during error state */
4105 if ((ERROR == mState) || (DEINIT == mState)) {
4106 return;
4107 }
4108 if (mFlushPerf) {
4109 handleBuffersDuringFlushLock(buffer);
4110 return;
4111 }
4112 //not in flush
4113 // If the frame number doesn't exist in the pending request list,
4114 // directly send the buffer to the frameworks, and update pending buffers map
4115 // Otherwise, book-keep the buffer.
4116 pendingRequestIterator i = mPendingRequestsList.begin();
4117 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4118 i++;
4119 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004120
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004121 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004122 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004123 // For a reprocessing request, try to send out result metadata.
4124 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004125 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004126 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004127
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004128 // Check if this frame was dropped.
4129 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4130 m != mPendingFrameDropList.end(); m++) {
4131 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4132 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4133 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4134 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4135 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4136 frame_number, streamID);
4137 m = mPendingFrameDropList.erase(m);
4138 break;
4139 }
4140 }
4141
4142 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4143 LOGH("result frame_number = %d, buffer = %p",
4144 frame_number, buffer->buffer);
4145
4146 mPendingBuffersMap.removeBuf(buffer->buffer);
4147 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4148
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004149 if (mPreviewStarted == false) {
4150 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4151 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004152 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4153
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004154 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4155 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4156 mPreviewStarted = true;
4157
4158 // Set power hint for preview
4159 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4160 }
4161 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004162}
4163
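/*===========================================================================
 * FUNCTION : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Updates the pending request entry with the result metadata,
 *              classifies the request (HDR+, reprocess or live) and then
 *              tries to dispatch results in order. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number of the result
 *              @resultMetadata : result metadata for the frame
 *
 * RETURN :
 *
 *==========================================================================*/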
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004164void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004165 const camera_metadata_t *resultMetadata)
4166{
4167 // Find the pending request for this result metadata.
4168 auto requestIter = mPendingRequestsList.begin();
4169 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4170 requestIter++;
4171 }
4172
4173 if (requestIter == mPendingRequestsList.end()) {
4174 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4175 return;
4176 }
4177
4178 // Update the result metadata
4179 requestIter->resultMetadata = resultMetadata;
4180
4181 // Check what type of request this is.
4182 bool liveRequest = false;
4183 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004184 // HDR+ request doesn't have partial results.
4185 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004186 } else if (requestIter->input_buffer != nullptr) {
4187 // Reprocessing request result is the same as settings.
4188 requestIter->resultMetadata = requestIter->settings;
4189 // Reprocessing request doesn't have partial results.
4190 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4191 } else {
4192 liveRequest = true;
4193 requestIter->partial_result_cnt++;
4194 mPendingLiveRequest--;
4195
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004196 {
4197 Mutex::Autolock l(gHdrPlusClientLock);
4198 // For a live request, send the metadata to HDR+ client.
4199 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4200 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4201 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4202 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004203 }
4204 }
4205
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004206 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4207}
4208
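/*===========================================================================
 * FUNCTION : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Sends out pending result metadata in frame-number order. A
 *              result is sent only once all earlier pending requests have
 *              their metadata; for a live request, older live requests that
 *              still have no metadata are returned as ERROR_RESULT. Called
 *              with mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number of the result that became ready
 *              @isLiveRequest : true if the result belongs to a live request
 *                               (not reprocess, not HDR+)
 *
 * RETURN :
 *
 *==========================================================================*/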
4209void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4210 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004211 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4212 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004213 bool readyToSend = true;
4214
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004215 // Iterate through the pending requests to send out result metadata that are ready. Also if
4216 // this result metadata belongs to a live request, notify errors for previous live requests
4217 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004218 auto iter = mPendingRequestsList.begin();
4219 while (iter != mPendingRequestsList.end()) {
4220 // Check if current pending request is ready. If it's not ready, the following pending
4221 // requests are also not ready.
4222 if (readyToSend && iter->resultMetadata == nullptr) {
4223 readyToSend = false;
4224 }
4225
4226 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4227
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004228 camera3_capture_result_t result = {};
4229 result.frame_number = iter->frame_number;
4230 result.result = iter->resultMetadata;
4231 result.partial_result = iter->partial_result_cnt;
4232
4233 // If this pending buffer has result metadata, we may be able to send out shutter callback
4234 // and result metadata.
4235 if (iter->resultMetadata != nullptr) {
4236 if (!readyToSend) {
4237 // If any of the previous pending request is not ready, this pending request is
4238 // also not ready to send in order to keep shutter callbacks and result metadata
4239 // in order.
4240 iter++;
4241 continue;
4242 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004243 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004244 // If the result metadata belongs to a live request, notify errors for previous pending
4245 // live requests.
4246 mPendingLiveRequest--;
4247
4248 CameraMetadata dummyMetadata;
4249 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4250 result.result = dummyMetadata.release();
4251
4252 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004253
4254 // partial_result should be PARTIAL_RESULT_CNT in case of
4255 // ERROR_RESULT.
4256 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4257 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004258 } else {
4259 iter++;
4260 continue;
4261 }
4262
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004263 result.output_buffers = nullptr;
4264 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004265 orchestrateResult(&result);
4266
4267 // For reprocessing, result metadata is the same as settings so do not free it here to
4268 // avoid double free.
4269 if (result.result != iter->settings) {
4270 free_camera_metadata((camera_metadata_t *)result.result);
4271 }
4272 iter->resultMetadata = nullptr;
4273 iter = erasePendingRequest(iter);
4274 }
4275
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004276 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004277 for (auto &iter : mPendingRequestsList) {
4278 // Increment pipeline depth for the following pending requests.
4279 if (iter.frame_number > frameNumber) {
4280 iter.pipeline_depth++;
4281 }
4282 }
4283 }
4284
4285 unblockRequestIfNecessary();
4286}
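/*
 * Illustrative sketch (not part of the QCamera3 implementation): the core
 * ordering rule above reduces to "pop and send from the front of the pending
 * list while the front entry has its metadata" (the error path for stale live
 * requests is omitted here). A standalone version, assuming only the C++
 * standard library:
 *
 *     #include <cstdint>
 *     #include <functional>
 *     #include <list>
 *
 *     struct PendingResult {
 *         uint32_t frameNumber;
 *         const void *resultMetadata;   // nullptr until the result arrives
 *     };
 *
 *     // Send results strictly in frame-number order: stop at the first entry
 *     // that is not ready yet so later results wait for earlier ones.
 *     static void dispatchInOrder(std::list<PendingResult> &pending,
 *             const std::function<void(const PendingResult &)> &send) {
 *         while (!pending.empty() && pending.front().resultMetadata != nullptr) {
 *             send(pending.front());
 *             pending.pop_front();
 *         }
 *     }
 */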
4287
Thierry Strudel3d639192016-09-09 11:52:26 -07004288/*===========================================================================
4289 * FUNCTION : unblockRequestIfNecessary
4290 *
4291 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4292 * that mMutex is held when this function is called.
4293 *
4294 * PARAMETERS :
4295 *
4296 * RETURN :
4297 *
4298 *==========================================================================*/
4299void QCamera3HardwareInterface::unblockRequestIfNecessary()
4300{
4301 // Unblock process_capture_request
4302 pthread_cond_signal(&mRequestCond);
4303}
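/*
 * Illustrative sketch (not part of the QCamera3 implementation): mRequestCond
 * pairs with a wait in the capture request path that throttles on the number
 * of in-flight requests. A minimal standalone version of that pattern, with a
 * hypothetical kMaxInFlight limit:
 *
 *     #include <pthread.h>
 *
 *     static pthread_mutex_t sMutex = PTHREAD_MUTEX_INITIALIZER;
 *     static pthread_cond_t  sCond  = PTHREAD_COND_INITIALIZER;
 *     static unsigned sInFlight = 0;
 *     static const unsigned kMaxInFlight = 4;   // hypothetical limit
 *
 *     void submitRequest() {                    // capture request path
 *         pthread_mutex_lock(&sMutex);
 *         while (sInFlight >= kMaxInFlight)
 *             pthread_cond_wait(&sCond, &sMutex);
 *         sInFlight++;
 *         pthread_mutex_unlock(&sMutex);
 *     }
 *
 *     void onResult() {                         // result path
 *         pthread_mutex_lock(&sMutex);
 *         sInFlight--;
 *         pthread_cond_signal(&sCond);          // cf. unblockRequestIfNecessary()
 *         pthread_mutex_unlock(&sMutex);
 *     }
 */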
4304
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004305/*===========================================================================
4306 * FUNCTION : isHdrSnapshotRequest
4307 *
                                                   4308 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4309 *
4310 * PARAMETERS : camera3 request structure
4311 *
4312 * RETURN : boolean decision variable
4313 *
4314 *==========================================================================*/
4315bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4316{
4317 if (request == NULL) {
4318 LOGE("Invalid request handle");
4319 assert(0);
4320 return false;
4321 }
4322
4323 if (!mForceHdrSnapshot) {
4324 CameraMetadata frame_settings;
4325 frame_settings = request->settings;
4326
4327 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4328 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4329 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4330 return false;
4331 }
4332 } else {
4333 return false;
4334 }
4335
4336 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4337 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4338 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4339 return false;
4340 }
4341 } else {
4342 return false;
4343 }
4344 }
4345
4346 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4347 if (request->output_buffers[i].stream->format
4348 == HAL_PIXEL_FORMAT_BLOB) {
4349 return true;
4350 }
4351 }
4352
4353 return false;
4354}
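/*
 * Illustrative sketch (not part of the QCamera3 implementation): for the check
 * above to classify a request as an HDR snapshot (when mForceHdrSnapshot is not
 * set), the client's settings must select the HDR scene mode and the request
 * must contain at least one BLOB (JPEG) output. A minimal sketch of such
 * settings using the CameraMetadata helper already visible in this file:
 *
 *     CameraMetadata settings;
 *     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *     // request->settings = settings.release();
 *     // ...plus at least one output buffer on a HAL_PIXEL_FORMAT_BLOB stream.
 */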
4355/*===========================================================================
4356 * FUNCTION : orchestrateRequest
4357 *
4358 * DESCRIPTION: Orchestrates a capture request from camera service
4359 *
4360 * PARAMETERS :
4361 * @request : request from framework to process
4362 *
4363 * RETURN : Error status codes
4364 *
4365 *==========================================================================*/
4366int32_t QCamera3HardwareInterface::orchestrateRequest(
4367 camera3_capture_request_t *request)
4368{
4369
4370 uint32_t originalFrameNumber = request->frame_number;
4371 uint32_t originalOutputCount = request->num_output_buffers;
4372 const camera_metadata_t *original_settings = request->settings;
4373 List<InternalRequest> internallyRequestedStreams;
4374 List<InternalRequest> emptyInternalList;
4375
4376 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4377 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4378 uint32_t internalFrameNumber;
4379 CameraMetadata modified_meta;
4380
4381
4382 /* Add Blob channel to list of internally requested streams */
4383 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4384 if (request->output_buffers[i].stream->format
4385 == HAL_PIXEL_FORMAT_BLOB) {
4386 InternalRequest streamRequested;
4387 streamRequested.meteringOnly = 1;
4388 streamRequested.need_metadata = 0;
4389 streamRequested.stream = request->output_buffers[i].stream;
4390 internallyRequestedStreams.push_back(streamRequested);
4391 }
4392 }
4393 request->num_output_buffers = 0;
4394 auto itr = internallyRequestedStreams.begin();
4395
4396 /* Modify setting to set compensation */
4397 modified_meta = request->settings;
4398 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4399 uint8_t aeLock = 1;
4400 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4401 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4402 camera_metadata_t *modified_settings = modified_meta.release();
4403 request->settings = modified_settings;
4404
4405 /* Capture Settling & -2x frame */
4406 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4407 request->frame_number = internalFrameNumber;
4408 processCaptureRequest(request, internallyRequestedStreams);
4409
4410 request->num_output_buffers = originalOutputCount;
4411 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4412 request->frame_number = internalFrameNumber;
4413 processCaptureRequest(request, emptyInternalList);
4414 request->num_output_buffers = 0;
4415
4416 modified_meta = modified_settings;
4417 expCompensation = 0;
4418 aeLock = 1;
4419 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4420 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4421 modified_settings = modified_meta.release();
4422 request->settings = modified_settings;
4423
4424 /* Capture Settling & 0X frame */
4425
4426 itr = internallyRequestedStreams.begin();
4427 if (itr == internallyRequestedStreams.end()) {
4428 LOGE("Error Internally Requested Stream list is empty");
4429 assert(0);
4430 } else {
4431 itr->need_metadata = 0;
4432 itr->meteringOnly = 1;
4433 }
4434
4435 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4436 request->frame_number = internalFrameNumber;
4437 processCaptureRequest(request, internallyRequestedStreams);
4438
4439 itr = internallyRequestedStreams.begin();
4440 if (itr == internallyRequestedStreams.end()) {
4441 ALOGE("Error Internally Requested Stream list is empty");
4442 assert(0);
4443 } else {
4444 itr->need_metadata = 1;
4445 itr->meteringOnly = 0;
4446 }
4447
4448 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4449 request->frame_number = internalFrameNumber;
4450 processCaptureRequest(request, internallyRequestedStreams);
4451
4452 /* Capture 2X frame*/
4453 modified_meta = modified_settings;
4454 expCompensation = GB_HDR_2X_STEP_EV;
4455 aeLock = 1;
4456 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4457 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4458 modified_settings = modified_meta.release();
4459 request->settings = modified_settings;
4460
4461 itr = internallyRequestedStreams.begin();
4462 if (itr == internallyRequestedStreams.end()) {
4463 ALOGE("Error Internally Requested Stream list is empty");
4464 assert(0);
4465 } else {
4466 itr->need_metadata = 0;
4467 itr->meteringOnly = 1;
4468 }
4469 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4470 request->frame_number = internalFrameNumber;
4471 processCaptureRequest(request, internallyRequestedStreams);
4472
4473 itr = internallyRequestedStreams.begin();
4474 if (itr == internallyRequestedStreams.end()) {
4475 ALOGE("Error Internally Requested Stream list is empty");
4476 assert(0);
4477 } else {
4478 itr->need_metadata = 1;
4479 itr->meteringOnly = 0;
4480 }
4481
4482 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4483 request->frame_number = internalFrameNumber;
4484 processCaptureRequest(request, internallyRequestedStreams);
4485
4486
4487 /* Capture 2X on original streaming config*/
4488 internallyRequestedStreams.clear();
4489
4490 /* Restore original settings pointer */
4491 request->settings = original_settings;
4492 } else {
4493 uint32_t internalFrameNumber;
4494 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4495 request->frame_number = internalFrameNumber;
4496 return processCaptureRequest(request, internallyRequestedStreams);
4497 }
4498
4499 return NO_ERROR;
4500}
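/*
 * Illustrative sketch (not part of the QCamera3 implementation): the HDR
 * orchestration above replays the request at several AE compensation values
 * with AE locked, pairing a metering-only settling capture with a real capture
 * at each step, and tags every internal capture with a fresh internal frame
 * number (omitted here). A standalone outline of that pattern, with
 * hypothetical EV values standing in for GB_HDR_HALF_STEP_EV and
 * GB_HDR_2X_STEP_EV:
 *
 *     #include <cstdint>
 *     #include <vector>
 *
 *     struct BracketStep { int32_t evCompensation; };
 *
 *     template <typename SubmitFn>
 *     static void runHdrBracket(const std::vector<BracketStep> &steps,
 *                               SubmitFn submit) {
 *         for (const BracketStep &step : steps) {
 *             submit(step.evCompensation, true);   // metering only: settle AE
 *             submit(step.evCompensation, false);  // real capture with metadata
 *         }
 *     }
 *
 *     // e.g. runHdrBracket({{-12}, {0}, {+12}}, submitFn);
 */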
4501
4502/*===========================================================================
4503 * FUNCTION : orchestrateResult
4504 *
4505 * DESCRIPTION: Orchestrates a capture result to camera service
4506 *
4507 * PARAMETERS :
                                                   4508 * @result : capture result to be sent to the framework
4509 *
4510 * RETURN :
4511 *
4512 *==========================================================================*/
4513void QCamera3HardwareInterface::orchestrateResult(
4514 camera3_capture_result_t *result)
4515{
4516 uint32_t frameworkFrameNumber;
4517 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4518 frameworkFrameNumber);
4519 if (rc != NO_ERROR) {
4520 LOGE("Cannot find translated frameworkFrameNumber");
4521 assert(0);
4522 } else {
4523 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004524 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004525 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004526 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004527 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4528 camera_metadata_entry_t entry;
4529 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4530 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004531 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004532 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4533 if (ret != OK)
4534 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004535 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004536 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004537 result->frame_number = frameworkFrameNumber;
4538 mCallbackOps->process_capture_result(mCallbackOps, result);
4539 }
4540 }
4541}
4542
4543/*===========================================================================
4544 * FUNCTION : orchestrateNotify
4545 *
4546 * DESCRIPTION: Orchestrates a notify to camera service
4547 *
4548 * PARAMETERS :
                                                   4549 * @notify_msg : notify message to be sent to the framework
4550 *
4551 * RETURN :
4552 *
4553 *==========================================================================*/
4554void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4555{
4556 uint32_t frameworkFrameNumber;
4557 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004558 int32_t rc = NO_ERROR;
4559
4560 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004561 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004562
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004563 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004564 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4565 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4566 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004567 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004568 LOGE("Cannot find translated frameworkFrameNumber");
4569 assert(0);
4570 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004571 }
4572 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004573
4574 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4575 LOGD("Internal Request drop the notifyCb");
4576 } else {
4577 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4578 mCallbackOps->notify(mCallbackOps, notify_msg);
4579 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004580}
4581
4582/*===========================================================================
4583 * FUNCTION : FrameNumberRegistry
4584 *
4585 * DESCRIPTION: Constructor
4586 *
4587 * PARAMETERS :
4588 *
4589 * RETURN :
4590 *
4591 *==========================================================================*/
4592FrameNumberRegistry::FrameNumberRegistry()
4593{
4594 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4595}
4596
4597/*===========================================================================
4598 * FUNCTION : ~FrameNumberRegistry
4599 *
4600 * DESCRIPTION: Destructor
4601 *
4602 * PARAMETERS :
4603 *
4604 * RETURN :
4605 *
4606 *==========================================================================*/
4607FrameNumberRegistry::~FrameNumberRegistry()
4608{
4609}
4610
4611/*===========================================================================
4612 * FUNCTION : PurgeOldEntriesLocked
4613 *
                                                   4614 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4615 *
4616 * PARAMETERS :
4617 *
4618 * RETURN : NONE
4619 *
4620 *==========================================================================*/
4621void FrameNumberRegistry::purgeOldEntriesLocked()
4622{
4623 while (_register.begin() != _register.end()) {
4624 auto itr = _register.begin();
4625 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4626 _register.erase(itr);
4627 } else {
4628 return;
4629 }
4630 }
4631}
4632
4633/*===========================================================================
4634 * FUNCTION : allocStoreInternalFrameNumber
4635 *
4636 * DESCRIPTION: Method to note down a framework request and associate a new
4637 * internal request number against it
4638 *
4639 * PARAMETERS :
                                                   4640 * @frameworkFrameNumber: Identifier given by the framework
                                                   4641 * @internalFrameNumber : Output parameter which will hold the newly generated
                                                   4642 *                        internal frame number
4643 *
4644 * RETURN : Error code
4645 *
4646 *==========================================================================*/
4647int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4648 uint32_t &internalFrameNumber)
4649{
4650 Mutex::Autolock lock(mRegistryLock);
4651 internalFrameNumber = _nextFreeInternalNumber++;
4652 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4653 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4654 purgeOldEntriesLocked();
4655 return NO_ERROR;
4656}
4657
4658/*===========================================================================
4659 * FUNCTION : generateStoreInternalFrameNumber
4660 *
                                                   4661 * DESCRIPTION: Method to generate a new internal frame number that is not
                                                   4662 *              associated with any framework request
4663 *
4664 * PARAMETERS :
                                                   4665 * @internalFrameNumber: Output parameter which will hold the newly
                                                   4666 *                       generated internal frame number
4667 *
4668 * RETURN : Error code
4669 *
4670 *==========================================================================*/
4671int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4672{
4673 Mutex::Autolock lock(mRegistryLock);
4674 internalFrameNumber = _nextFreeInternalNumber++;
4675 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4676 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4677 purgeOldEntriesLocked();
4678 return NO_ERROR;
4679}
4680
4681/*===========================================================================
4682 * FUNCTION : getFrameworkFrameNumber
4683 *
                                                   4684 * DESCRIPTION: Method to query the framework frame number given an internal one
                                                   4685 *
                                                   4686 * PARAMETERS :
                                                   4687 * @internalFrameNumber : Internal frame number reference
                                                   4688 * @frameworkFrameNumber: Output parameter holding the framework frame number
4689 *
4690 * RETURN : Error code
4691 *
4692 *==========================================================================*/
4693int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4694 uint32_t &frameworkFrameNumber)
4695{
4696 Mutex::Autolock lock(mRegistryLock);
4697 auto itr = _register.find(internalFrameNumber);
4698 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004699 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004700 return -ENOENT;
4701 }
4702
4703 frameworkFrameNumber = itr->second;
4704 purgeOldEntriesLocked();
4705 return NO_ERROR;
4706}
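/*
 * Illustrative sketch (not part of the QCamera3 implementation): the registry
 * above is essentially an ordered map from HAL-internal frame numbers to
 * framework frame numbers, with internally generated captures mapped to an
 * "empty" sentinel so their results and notifies can be dropped before they
 * reach the framework. A minimal standalone version, assuming only the C++
 * standard library:
 *
 *     #include <cstdint>
 *     #include <map>
 *
 *     class FrameNumberMap {
 *       public:
 *         static constexpr uint32_t kEmpty = 0xFFFFFFFF;   // stand-in sentinel
 *
 *         // Associate a framework frame number with a fresh internal number.
 *         uint32_t store(uint32_t frameworkFrameNumber) {
 *             uint32_t internal = mNextInternal++;
 *             mRegister[internal] = frameworkFrameNumber;
 *             return internal;
 *         }
 *         // Generate an internal-only entry (no framework counterpart).
 *         uint32_t storeInternalOnly() { return store(kEmpty); }
 *         // Look up the framework number; false means the entry is unknown.
 *         bool lookup(uint32_t internal, uint32_t &framework) const {
 *             auto it = mRegister.find(internal);
 *             if (it == mRegister.end()) return false;
 *             framework = it->second;
 *             return true;
 *         }
 *       private:
 *         uint32_t mNextInternal = 1000;   // stand-in starting number
 *         std::map<uint32_t, uint32_t> mRegister;
 *     };
 */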
Thierry Strudel3d639192016-09-09 11:52:26 -07004707
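/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from the stream
 *              info of a QCamera3 channel stream, including per-plane stride
 *              and scanline and the residual padding after all planes.
 *
 * PARAMETERS : @config : output stream configuration to fill
 *              @pbStreamId : HDR+ stream id
 *              @pbStreamFormat : HDR+ stream pixel format
 *              @channel : channel owning the stream
 *              @streamIndex : index of the stream within the channel
 *
 * RETURN : Error status code
 *
 *==========================================================================*/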
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004708status_t QCamera3HardwareInterface::fillPbStreamConfig(
4709 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4710 QCamera3Channel *channel, uint32_t streamIndex) {
4711 if (config == nullptr) {
4712 LOGE("%s: config is null", __FUNCTION__);
4713 return BAD_VALUE;
4714 }
4715
4716 if (channel == nullptr) {
4717 LOGE("%s: channel is null", __FUNCTION__);
4718 return BAD_VALUE;
4719 }
4720
4721 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4722 if (stream == nullptr) {
4723 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4724 return NAME_NOT_FOUND;
4725 }
4726
4727 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4728 if (streamInfo == nullptr) {
4729 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4730 return NAME_NOT_FOUND;
4731 }
4732
4733 config->id = pbStreamId;
4734 config->image.width = streamInfo->dim.width;
4735 config->image.height = streamInfo->dim.height;
4736 config->image.padding = 0;
4737 config->image.format = pbStreamFormat;
4738
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004739 uint32_t totalPlaneSize = 0;
4740
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004741 // Fill plane information.
4742 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4743 pbcamera::PlaneConfiguration plane;
4744 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4745 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4746 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004747
4748 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004749 }
4750
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004751 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004752 return OK;
4753}
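/*
 * Illustrative sketch (not part of the QCamera3 implementation): the padding
 * reported above is simply whatever remains of the allocator's frame length
 * after all plane data, padding = frame_len - sum(stride * scanline). A worked
 * example with hypothetical NV21-style numbers:
 *
 *     uint32_t strideY = 1920, scanlineY = 1088;      // Y plane
 *     uint32_t strideUV = 1920, scanlineUV = 544;     // interleaved UV plane
 *     uint32_t frameLen = 3137536;                    // allocator-provided length
 *     uint32_t planes = strideY * scanlineY
 *             + strideUV * scanlineUV;                // 3133440
 *     uint32_t padding = frameLen - planes;           // 4096
 */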
4754
Thierry Strudel3d639192016-09-09 11:52:26 -07004755/*===========================================================================
4756 * FUNCTION : processCaptureRequest
4757 *
4758 * DESCRIPTION: process a capture request from camera service
4759 *
4760 * PARAMETERS :
4761 * @request : request from framework to process
4762 *
4763 * RETURN :
4764 *
4765 *==========================================================================*/
4766int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004767 camera3_capture_request_t *request,
4768 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004769{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004770 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004771 int rc = NO_ERROR;
4772 int32_t request_id;
4773 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004774 bool isVidBufRequested = false;
4775 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004776 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004777
4778 pthread_mutex_lock(&mMutex);
4779
4780 // Validate current state
4781 switch (mState) {
4782 case CONFIGURED:
4783 case STARTED:
4784 /* valid state */
4785 break;
4786
4787 case ERROR:
4788 pthread_mutex_unlock(&mMutex);
4789 handleCameraDeviceError();
4790 return -ENODEV;
4791
4792 default:
4793 LOGE("Invalid state %d", mState);
4794 pthread_mutex_unlock(&mMutex);
4795 return -ENODEV;
4796 }
4797
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004798 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004799 if (rc != NO_ERROR) {
4800 LOGE("incoming request is not valid");
4801 pthread_mutex_unlock(&mMutex);
4802 return rc;
4803 }
4804
4805 meta = request->settings;
4806
4807 // For first capture request, send capture intent, and
4808 // stream on all streams
4809 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004810 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004811 // send an unconfigure to the backend so that the isp
4812 // resources are deallocated
4813 if (!mFirstConfiguration) {
4814 cam_stream_size_info_t stream_config_info;
4815 int32_t hal_version = CAM_HAL_V3;
4816 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4817 stream_config_info.buffer_info.min_buffers =
4818 MIN_INFLIGHT_REQUESTS;
4819 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004820 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004821 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004822 clear_metadata_buffer(mParameters);
4823 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4824 CAM_INTF_PARM_HAL_VERSION, hal_version);
4825 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4826 CAM_INTF_META_STREAM_INFO, stream_config_info);
4827 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4828 mParameters);
4829 if (rc < 0) {
4830 LOGE("set_parms for unconfigure failed");
4831 pthread_mutex_unlock(&mMutex);
4832 return rc;
4833 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004834
Thierry Strudel3d639192016-09-09 11:52:26 -07004835 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004836 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004838 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004839 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004840 property_get("persist.camera.is_type", is_type_value, "4");
4841 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4842 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4843 property_get("persist.camera.is_type_preview", is_type_value, "4");
4844 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4845 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004846
4847 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4848 int32_t hal_version = CAM_HAL_V3;
4849 uint8_t captureIntent =
4850 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4851 mCaptureIntent = captureIntent;
4852 clear_metadata_buffer(mParameters);
4853 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4854 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4855 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004856 if (mFirstConfiguration) {
4857 // configure instant AEC
4858 // Instant AEC is a session based parameter and it is needed only
4859 // once per complete session after open camera.
4860 // i.e. This is set only once for the first capture request, after open camera.
4861 setInstantAEC(meta);
4862 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004863 uint8_t fwkVideoStabMode=0;
4864 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4865 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4866 }
4867
Xue Tuecac74e2017-04-17 13:58:15 -07004868        // If the EIS setprop is enabled, turn it on only for video/preview streams
4869 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004870 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004871 int32_t vsMode;
4872 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4874 rc = BAD_VALUE;
4875 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004876 LOGD("setEis %d", setEis);
4877 bool eis3Supported = false;
4878 size_t count = IS_TYPE_MAX;
4879 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4880 for (size_t i = 0; i < count; i++) {
4881 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4882 eis3Supported = true;
4883 break;
4884 }
4885 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004886
4887 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004888 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004889 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4890 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004891 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4892 is_type = isTypePreview;
4893 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4894 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4895 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004897 } else {
4898 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004899 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004900 } else {
4901 is_type = IS_TYPE_NONE;
4902 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004904 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004905 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4906 }
4907 }
4908
4909 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4910 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4911
Thierry Strudel54dc9782017-02-15 12:12:10 -08004912 //Disable tintless only if the property is set to 0
4913 memset(prop, 0, sizeof(prop));
4914 property_get("persist.camera.tintless.enable", prop, "1");
4915 int32_t tintless_value = atoi(prop);
4916
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4918 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004919
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 //Disable CDS for HFR mode or if DIS/EIS is on.
4921 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4922 //after every configure_stream
4923 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4924 (m_bIsVideo)) {
4925 int32_t cds = CAM_CDS_MODE_OFF;
4926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4927 CAM_INTF_PARM_CDS_MODE, cds))
4928 LOGE("Failed to disable CDS for HFR mode");
4929
4930 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004931
4932 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4933 uint8_t* use_av_timer = NULL;
4934
4935 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004936 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 use_av_timer = &m_debug_avtimer;
4938 }
4939 else{
4940 use_av_timer =
4941 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004942 if (use_av_timer) {
4943 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4944 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004945 }
4946
4947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4948 rc = BAD_VALUE;
4949 }
4950 }
4951
Thierry Strudel3d639192016-09-09 11:52:26 -07004952 setMobicat();
4953
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004954 uint8_t nrMode = 0;
4955 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4956 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4957 }
4958
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 /* Set fps and hfr mode while sending meta stream info so that sensor
4960 * can configure appropriate streaming mode */
4961 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004962 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4963 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004964 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4965 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004966 if (rc == NO_ERROR) {
4967 int32_t max_fps =
4968 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004969 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004970 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4971 }
4972 /* For HFR, more buffers are dequeued upfront to improve the performance */
4973 if (mBatchSize) {
4974 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4975 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4976 }
4977 }
4978 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004979 LOGE("setHalFpsRange failed");
4980 }
4981 }
4982 if (meta.exists(ANDROID_CONTROL_MODE)) {
4983 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4984 rc = extractSceneMode(meta, metaMode, mParameters);
4985 if (rc != NO_ERROR) {
4986 LOGE("extractSceneMode failed");
4987 }
4988 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004989 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004990
Thierry Strudel04e026f2016-10-10 11:27:36 -07004991 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4992 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4993 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4994 rc = setVideoHdrMode(mParameters, vhdr);
4995 if (rc != NO_ERROR) {
4996 LOGE("setVideoHDR is failed");
4997 }
4998 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004999
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005000 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005001 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005002 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005003 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5005 sensorModeFullFov)) {
5006 rc = BAD_VALUE;
5007 }
5008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 //TODO: validate the arguments, HSV scenemode should have only the
5010 //advertised fps ranges
5011
5012 /*set the capture intent, hal version, tintless, stream info,
                                                   5013         *and DIS enable parameters to the backend*/
5014 LOGD("set_parms META_STREAM_INFO " );
5015 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005016 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5017 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005018 mStreamConfigInfo.type[i],
5019 mStreamConfigInfo.stream_sizes[i].width,
5020 mStreamConfigInfo.stream_sizes[i].height,
5021 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005022 mStreamConfigInfo.format[i],
5023 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005024 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005025
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5027 mParameters);
5028 if (rc < 0) {
5029 LOGE("set_parms failed for hal version, stream info");
5030 }
5031
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005032 cam_sensor_mode_info_t sensorModeInfo = {};
5033 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005034 if (rc != NO_ERROR) {
5035 LOGE("Failed to get sensor output size");
5036 pthread_mutex_unlock(&mMutex);
5037 goto error_exit;
5038 }
5039
5040 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5041 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005042 sensorModeInfo.active_array_size.width,
5043 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005044
5045 /* Set batchmode before initializing channel. Since registerBuffer
5046 * internally initializes some of the channels, better set batchmode
5047 * even before first register buffer */
5048 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5049 it != mStreamInfo.end(); it++) {
5050 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5051 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5052 && mBatchSize) {
5053 rc = channel->setBatchSize(mBatchSize);
5054 //Disable per frame map unmap for HFR/batchmode case
5055 rc |= channel->setPerFrameMapUnmap(false);
5056 if (NO_ERROR != rc) {
5057 LOGE("Channel init failed %d", rc);
5058 pthread_mutex_unlock(&mMutex);
5059 goto error_exit;
5060 }
5061 }
5062 }
5063
5064 //First initialize all streams
5065 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5066 it != mStreamInfo.end(); it++) {
5067 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005068
5069 /* Initial value of NR mode is needed before stream on */
5070 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005071 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5072 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005073 setEis) {
5074 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5075 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5076 is_type = mStreamConfigInfo.is_type[i];
5077 break;
5078 }
5079 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005080 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005081 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005082 rc = channel->initialize(IS_TYPE_NONE);
5083 }
5084 if (NO_ERROR != rc) {
5085 LOGE("Channel initialization failed %d", rc);
5086 pthread_mutex_unlock(&mMutex);
5087 goto error_exit;
5088 }
5089 }
5090
5091 if (mRawDumpChannel) {
5092 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5093 if (rc != NO_ERROR) {
5094 LOGE("Error: Raw Dump Channel init failed");
5095 pthread_mutex_unlock(&mMutex);
5096 goto error_exit;
5097 }
5098 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005099 if (mHdrPlusRawSrcChannel) {
5100 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5101 if (rc != NO_ERROR) {
5102 LOGE("Error: HDR+ RAW Source Channel init failed");
5103 pthread_mutex_unlock(&mMutex);
5104 goto error_exit;
5105 }
5106 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005107 if (mSupportChannel) {
5108 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5109 if (rc < 0) {
5110 LOGE("Support channel initialization failed");
5111 pthread_mutex_unlock(&mMutex);
5112 goto error_exit;
5113 }
5114 }
5115 if (mAnalysisChannel) {
5116 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5117 if (rc < 0) {
5118 LOGE("Analysis channel initialization failed");
5119 pthread_mutex_unlock(&mMutex);
5120 goto error_exit;
5121 }
5122 }
5123 if (mDummyBatchChannel) {
5124 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5125 if (rc < 0) {
5126 LOGE("mDummyBatchChannel setBatchSize failed");
5127 pthread_mutex_unlock(&mMutex);
5128 goto error_exit;
5129 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005130 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (rc < 0) {
5132 LOGE("mDummyBatchChannel initialization failed");
5133 pthread_mutex_unlock(&mMutex);
5134 goto error_exit;
5135 }
5136 }
5137
5138 // Set bundle info
5139 rc = setBundleInfo();
5140 if (rc < 0) {
5141 LOGE("setBundleInfo failed %d", rc);
5142 pthread_mutex_unlock(&mMutex);
5143 goto error_exit;
5144 }
5145
5146 //update settings from app here
5147 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5148 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5149 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5150 }
5151 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5152 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5153 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5154 }
5155 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5156 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5157 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5158
5159 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5160 (mLinkedCameraId != mCameraId) ) {
5161 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5162 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005163 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005164 goto error_exit;
5165 }
5166 }
5167
5168 // add bundle related cameras
5169 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5170 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005171 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5172 &m_pDualCamCmdPtr->bundle_info;
5173 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 if (mIsDeviceLinked)
5175 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5176 else
5177 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5178
5179 pthread_mutex_lock(&gCamLock);
5180
5181 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5182 LOGE("Dualcam: Invalid Session Id ");
5183 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005184 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 goto error_exit;
5186 }
5187
5188 if (mIsMainCamera == 1) {
5189 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5190 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005191 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005192 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 // related session id should be session id of linked session
5194 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5195 } else {
5196 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5197 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005198 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005199 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005200 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5201 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005202 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005203 pthread_mutex_unlock(&gCamLock);
5204
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005205 rc = mCameraHandle->ops->set_dual_cam_cmd(
5206 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005207 if (rc < 0) {
5208 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005209 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 goto error_exit;
5211 }
5212 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 goto no_error;
5214error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005215 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005216 return rc;
5217no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005218 mWokenUpByDaemon = false;
5219 mPendingLiveRequest = 0;
5220 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005221 }
5222
5223 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005224 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005225
5226 if (mFlushPerf) {
5227 //we cannot accept any requests during flush
5228 LOGE("process_capture_request cannot proceed during flush");
5229 pthread_mutex_unlock(&mMutex);
5230 return NO_ERROR; // TODO: this should return an error instead of NO_ERROR
5231 }
5232
5233 if (meta.exists(ANDROID_REQUEST_ID)) {
5234 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5235 mCurrentRequestId = request_id;
5236 LOGD("Received request with id: %d", request_id);
5237 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5238 LOGE("Unable to find request id field, \
5239 & no previous id available");
5240 pthread_mutex_unlock(&mMutex);
5241 return NAME_NOT_FOUND;
5242 } else {
5243 LOGD("Re-using old request id");
5244 request_id = mCurrentRequestId;
5245 }
5246
5247 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5248 request->num_output_buffers,
5249 request->input_buffer,
5250 frameNumber);
5251 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005252 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005254 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 uint32_t snapshotStreamId = 0;
5256 for (size_t i = 0; i < request->num_output_buffers; i++) {
5257 const camera3_stream_buffer_t& output = request->output_buffers[i];
5258 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5259
Emilian Peev7650c122017-01-19 08:24:33 -08005260 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5261 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005262 //FIXME: Call a function to store a local copy of the JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 blob_request = 1;
5264 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5265 }
5266
5267 if (output.acquire_fence != -1) {
5268 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5269 close(output.acquire_fence);
5270 if (rc != OK) {
5271 LOGE("sync wait failed %d", rc);
5272 pthread_mutex_unlock(&mMutex);
5273 return rc;
5274 }
5275 }
5276
Emilian Peev0f3c3162017-03-15 12:57:46 +00005277 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5278 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005279 depthRequestPresent = true;
5280 continue;
5281 }
5282
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005283 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005285
5286 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5287 isVidBufRequested = true;
5288 }
5289 }
5290
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005291 //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5292 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5293 itr++) {
5294 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5295 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5296 channel->getStreamID(channel->getStreamTypeMask());
5297
5298 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5299 isVidBufRequested = true;
5300 }
5301 }
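// At this point streamsArray holds the backend stream IDs touched by this
// request (framework output buffers plus internally requested streams).
// Additional internal streams (RAW dump, HDR+ RAW source) may still be
// appended below before the list is sent to the backend as
// CAM_INTF_META_STREAM_ID.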
5302
Thierry Strudel3d639192016-09-09 11:52:26 -07005303 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005304 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005305 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005306 }
5307 if (blob_request && mRawDumpChannel) {
5308 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005309 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005311 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 }
5313
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005314 {
5315 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5316 // Request a RAW buffer if
5317 // 1. mHdrPlusRawSrcChannel is valid.
5318 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate.)
5319 // 3. There is no pending HDR+ request.
5320 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5321 mHdrPlusPendingRequests.size() == 0) {
5322 streamsArray.stream_request[streamsArray.num_streams].streamID =
5323 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5324 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5325 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005326 }
5327
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005328 //extract capture intent
5329 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5330 mCaptureIntent =
5331 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5332 }
5333
5334 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5335 mCacMode =
5336 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5337 }
5338
5339 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005340 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005341
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005342 {
5343 Mutex::Autolock l(gHdrPlusClientLock);
5344 // If this request has a still capture intent, try to submit an HDR+ request.
5345 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5346 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5347 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5348 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005349 }
5350
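// From here the request takes one of two paths: if trySubmittingHdrPlusRequestLocked()
// accepted it, the capture is serviced by the HDR+ (Easel) pipeline and only
// the frame parameters are set below; otherwise it is programmed into the
// regular streaming pipeline with per-request (or per-batch) parameter updates.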
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005351 if (hdrPlusRequest) {
5352 // For a HDR+ request, just set the frame parameters.
5353 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5354 if (rc < 0) {
5355 LOGE("fail to set frame parameters");
5356 pthread_mutex_unlock(&mMutex);
5357 return rc;
5358 }
5359 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005360 /* Parse the settings:
5361 * - For every request in NORMAL MODE
5362 * - For every request in HFR mode during preview only case
5363 * - For first request of every batch in HFR mode during video
5364 * recording. In batchmode the same settings except frame number is
5365 * repeated in each request of the batch.
5366 */
5367 if (!mBatchSize ||
5368 (mBatchSize && !isVidBufRequested) ||
5369 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005370 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005371 if (rc < 0) {
5372 LOGE("fail to set frame parameters");
5373 pthread_mutex_unlock(&mMutex);
5374 return rc;
5375 }
5376 }
5377 /* For batchMode HFR, setFrameParameters is not called for every
5378 * request; only the frame number of the latest request is parsed.
5379 * Keep track of the first and last frame numbers in a batch so that
5380 * metadata for all frame numbers of the batch can be duplicated in
5381 * handleBatchMetadata */
5382 if (mBatchSize) {
5383 if (!mToBeQueuedVidBufs) {
5384 //start of the batch
5385 mFirstFrameNumberInBatch = request->frame_number;
5386 }
5387 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5388 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5389 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005390 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005391 return BAD_VALUE;
5392 }
5393 }
5394 if (mNeedSensorRestart) {
5395 /* Unlock the mutex as restartSensor waits on the channels to be
5396 * stopped, which in turn calls stream callback functions -
5397 * handleBufferWithLock and handleMetadataWithLock */
5398 pthread_mutex_unlock(&mMutex);
5399 rc = dynamicUpdateMetaStreamInfo();
5400 if (rc != NO_ERROR) {
5401 LOGE("Restarting the sensor failed");
5402 return BAD_VALUE;
5403 }
5404 mNeedSensorRestart = false;
5405 pthread_mutex_lock(&mMutex);
5406 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005407 if(mResetInstantAEC) {
5408 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5409 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5410 mResetInstantAEC = false;
5411 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005412 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005413 if (request->input_buffer->acquire_fence != -1) {
5414 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5415 close(request->input_buffer->acquire_fence);
5416 if (rc != OK) {
5417 LOGE("input buffer sync wait failed %d", rc);
5418 pthread_mutex_unlock(&mMutex);
5419 return rc;
5420 }
5421 }
5422 }
5423
5424 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5425 mLastCustIntentFrmNum = frameNumber;
5426 }
5427 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005428 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 pendingRequestIterator latestRequest;
5430 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005431 pendingRequest.num_buffers = depthRequestPresent ?
5432 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005433 pendingRequest.request_id = request_id;
5434 pendingRequest.blob_request = blob_request;
5435 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005436 if (request->input_buffer) {
5437 pendingRequest.input_buffer =
5438 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5439 *(pendingRequest.input_buffer) = *(request->input_buffer);
5440 pInputBuffer = pendingRequest.input_buffer;
5441 } else {
5442 pendingRequest.input_buffer = NULL;
5443 pInputBuffer = NULL;
5444 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005445 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005446
5447 pendingRequest.pipeline_depth = 0;
5448 pendingRequest.partial_result_cnt = 0;
5449 extractJpegMetadata(mCurJpegMeta, request);
5450 pendingRequest.jpegMetadata = mCurJpegMeta;
5451 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005452 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005453 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5454 mHybridAeEnable =
5455 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5456 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005457
5458 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5459 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005460 /* DevCamDebug metadata processCaptureRequest */
5461 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5462 mDevCamDebugMetaEnable =
5463 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5464 }
5465 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5466 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005467
5468 //extract CAC info
5469 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5470 mCacMode =
5471 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5472 }
5473 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005474 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005475
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005476 // extract enableZsl info
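// ANDROID_CONTROL_ENABLE_ZSL (only honored when gExposeEnableZslKey is set)
// lets the app allow still captures to be produced from zero-shutter-lag
// buffers captured before the request; the last received value is sticky and
// reused when a request omits the key.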
5477 if (gExposeEnableZslKey) {
5478 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5479 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5480 mZslEnabled = pendingRequest.enableZsl;
5481 } else {
5482 pendingRequest.enableZsl = mZslEnabled;
5483 }
5484 }
5485
Thierry Strudel3d639192016-09-09 11:52:26 -07005486 PendingBuffersInRequest bufsForCurRequest;
5487 bufsForCurRequest.frame_number = frameNumber;
5488 // Mark current timestamp for the new request
5489 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005490 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005491
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005492 if (hdrPlusRequest) {
5493 // Save settings for this request.
5494 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5495 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5496
5497 // Add to pending HDR+ request queue.
5498 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5499 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5500
5501 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5502 }
5503
Thierry Strudel3d639192016-09-09 11:52:26 -07005504 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005505 if ((request->output_buffers[i].stream->data_space ==
5506 HAL_DATASPACE_DEPTH) &&
5507 (HAL_PIXEL_FORMAT_BLOB ==
5508 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005509 continue;
5510 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005511 RequestedBufferInfo requestedBuf;
5512 memset(&requestedBuf, 0, sizeof(requestedBuf));
5513 requestedBuf.stream = request->output_buffers[i].stream;
5514 requestedBuf.buffer = NULL;
5515 pendingRequest.buffers.push_back(requestedBuf);
5516
5517 // Add to buffer handle the pending buffers list
5518 PendingBufferInfo bufferInfo;
5519 bufferInfo.buffer = request->output_buffers[i].buffer;
5520 bufferInfo.stream = request->output_buffers[i].stream;
5521 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5522 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5523 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5524 frameNumber, bufferInfo.buffer,
5525 channel->getStreamTypeMask(), bufferInfo.stream->format);
5526 }
5527 // Add this request packet into mPendingBuffersMap
5528 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5529 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5530 mPendingBuffersMap.get_num_overall_buffers());
5531
5532 latestRequest = mPendingRequestsList.insert(
5533 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005534
5535 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5536 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005537 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005538 for (size_t i = 0; i < request->num_output_buffers; i++) {
5539 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5540 }
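// Registering the frame number with the shutter and output-buffer dispatchers
// lets them hold results that complete out of order and deliver shutter
// notifications and output buffers to the framework in frame-number order.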
5541
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 if(mFlush) {
5543 LOGI("mFlush is true");
5544 pthread_mutex_unlock(&mMutex);
5545 return NO_ERROR;
5546 }
5547
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005548 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5549 // channel.
5550 if (!hdrPlusRequest) {
5551 int indexUsed;
5552 // Notify the metadata channel that we received a request
5553 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005554
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 if(request->input_buffer != NULL){
5556 LOGD("Input request, frame_number %d", frameNumber);
5557 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5558 if (NO_ERROR != rc) {
5559 LOGE("fail to set reproc parameters");
5560 pthread_mutex_unlock(&mMutex);
5561 return rc;
5562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005563 }
5564
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005565 // Call request on other streams
5566 uint32_t streams_need_metadata = 0;
5567 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5568 for (size_t i = 0; i < request->num_output_buffers; i++) {
5569 const camera3_stream_buffer_t& output = request->output_buffers[i];
5570 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5571
5572 if (channel == NULL) {
5573 LOGW("invalid channel pointer for stream");
5574 continue;
5575 }
5576
5577 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5578 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5579 output.buffer, request->input_buffer, frameNumber);
5580 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005581 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005582 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5583 if (rc < 0) {
5584 LOGE("Fail to request on picture channel");
5585 pthread_mutex_unlock(&mMutex);
5586 return rc;
5587 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005588 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005589 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5590 assert(NULL != mDepthChannel);
5591 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005592
Emilian Peev7650c122017-01-19 08:24:33 -08005593 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5594 if (rc < 0) {
5595 LOGE("Fail to map on depth buffer");
5596 pthread_mutex_unlock(&mMutex);
5597 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005598 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005599 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005600 } else {
5601 LOGD("snapshot request with buffer %p, frame_number %d",
5602 output.buffer, frameNumber);
5603 if (!request->settings) {
5604 rc = channel->request(output.buffer, frameNumber,
5605 NULL, mPrevParameters, indexUsed);
5606 } else {
5607 rc = channel->request(output.buffer, frameNumber,
5608 NULL, mParameters, indexUsed);
5609 }
5610 if (rc < 0) {
5611 LOGE("Fail to request on picture channel");
5612 pthread_mutex_unlock(&mMutex);
5613 return rc;
5614 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005615
Emilian Peev7650c122017-01-19 08:24:33 -08005616 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5617 uint32_t j = 0;
5618 for (j = 0; j < streamsArray.num_streams; j++) {
5619 if (streamsArray.stream_request[j].streamID == streamId) {
5620 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5621 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5622 else
5623 streamsArray.stream_request[j].buf_index = indexUsed;
5624 break;
5625 }
5626 }
5627 if (j == streamsArray.num_streams) {
5628 LOGE("Did not find matching stream to update index");
5629 assert(0);
5630 }
5631
5632 pendingBufferIter->need_metadata = true;
5633 streams_need_metadata++;
5634 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005635 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005636 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5637 bool needMetadata = false;
5638 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5639 rc = yuvChannel->request(output.buffer, frameNumber,
5640 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5641 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005642 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005644 pthread_mutex_unlock(&mMutex);
5645 return rc;
5646 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005647
5648 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5649 uint32_t j = 0;
5650 for (j = 0; j < streamsArray.num_streams; j++) {
5651 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005652 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5653 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5654 else
5655 streamsArray.stream_request[j].buf_index = indexUsed;
5656 break;
5657 }
5658 }
5659 if (j == streamsArray.num_streams) {
5660 LOGE("Did not find matching stream to update index");
5661 assert(0);
5662 }
5663
5664 pendingBufferIter->need_metadata = needMetadata;
5665 if (needMetadata)
5666 streams_need_metadata += 1;
5667 LOGD("calling YUV channel request, need_metadata is %d",
5668 needMetadata);
5669 } else {
5670 LOGD("request with buffer %p, frame_number %d",
5671 output.buffer, frameNumber);
5672
5673 rc = channel->request(output.buffer, frameNumber, indexUsed);
5674
5675 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5676 uint32_t j = 0;
5677 for (j = 0; j < streamsArray.num_streams; j++) {
5678 if (streamsArray.stream_request[j].streamID == streamId) {
5679 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5680 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5681 else
5682 streamsArray.stream_request[j].buf_index = indexUsed;
5683 break;
5684 }
5685 }
5686 if (j == streamsArray.num_streams) {
5687 LOGE("Did not find matching stream to update index");
5688 assert(0);
5689 }
5690
5691 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5692 && mBatchSize) {
5693 mToBeQueuedVidBufs++;
5694 if (mToBeQueuedVidBufs == mBatchSize) {
5695 channel->queueBatchBuf();
5696 }
5697 }
5698 if (rc < 0) {
5699 LOGE("request failed");
5700 pthread_mutex_unlock(&mMutex);
5701 return rc;
5702 }
5703 }
5704 pendingBufferIter++;
5705 }
5706
5707 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5708 itr++) {
5709 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5710
5711 if (channel == NULL) {
5712 LOGE("invalid channel pointer for stream");
5713 assert(0);
5714 return BAD_VALUE;
5715 }
5716
5717 InternalRequest requestedStream;
5718 requestedStream = (*itr);
5719
5720
5721 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5722 LOGD("snapshot request internally input buffer %p, frame_number %d",
5723 request->input_buffer, frameNumber);
5724 if(request->input_buffer != NULL){
5725 rc = channel->request(NULL, frameNumber,
5726 pInputBuffer, &mReprocMeta, indexUsed, true,
5727 requestedStream.meteringOnly);
5728 if (rc < 0) {
5729 LOGE("Fail to request on picture channel");
5730 pthread_mutex_unlock(&mMutex);
5731 return rc;
5732 }
5733 } else {
5734 LOGD("snapshot request with frame_number %d", frameNumber);
5735 if (!request->settings) {
5736 rc = channel->request(NULL, frameNumber,
5737 NULL, mPrevParameters, indexUsed, true,
5738 requestedStream.meteringOnly);
5739 } else {
5740 rc = channel->request(NULL, frameNumber,
5741 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5742 }
5743 if (rc < 0) {
5744 LOGE("Fail to request on picture channel");
5745 pthread_mutex_unlock(&mMutex);
5746 return rc;
5747 }
5748
5749 if ((*itr).meteringOnly != 1) {
5750 requestedStream.need_metadata = 1;
5751 streams_need_metadata++;
5752 }
5753 }
5754
5755 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5756 uint32_t j = 0;
5757 for (j = 0; j < streamsArray.num_streams; j++) {
5758 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005759 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5760 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5761 else
5762 streamsArray.stream_request[j].buf_index = indexUsed;
5763 break;
5764 }
5765 }
5766 if (j == streamsArray.num_streams) {
5767 LOGE("Did not find matching stream to update index");
5768 assert(0);
5769 }
5770
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005771 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005772 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005773 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005774 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005775 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005776 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005777 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005778
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005779 //If 2 streams have need_metadata set to true, fail the request, unless
5780 //we copy/reference count the metadata buffer
5781 if (streams_need_metadata > 1) {
5782 LOGE("not supporting request in which two streams requires"
5783 " 2 HAL metadata for reprocessing");
5784 pthread_mutex_unlock(&mMutex);
5785 return -EINVAL;
5786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005787
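// PDAF/depth data handling: when a depth (PD) channel exists, the backend is
// asked to either generate PD data (CAM_PD_DATA_ENABLED) or keep the stream
// alive but skip the data (CAM_PD_DATA_SKIP); with no depth channel it is
// disabled entirely (CAM_PD_DATA_DISABLED). The choice follows the
// NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE setting when present, otherwise the
// last cached mDepthCloudMode.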
Emilian Peev656e4fa2017-06-02 16:47:04 +01005788 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5789 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5790 if (depthRequestPresent && mDepthChannel) {
5791 if (request->settings) {
5792 camera_metadata_ro_entry entry;
5793 if (find_camera_metadata_ro_entry(request->settings,
5794 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5795 if (entry.data.u8[0]) {
5796 pdafEnable = CAM_PD_DATA_ENABLED;
5797 } else {
5798 pdafEnable = CAM_PD_DATA_SKIP;
5799 }
5800 mDepthCloudMode = pdafEnable;
5801 } else {
5802 pdafEnable = mDepthCloudMode;
5803 }
5804 } else {
5805 pdafEnable = mDepthCloudMode;
5806 }
5807 }
5808
Emilian Peev7650c122017-01-19 08:24:33 -08005809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5810 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5811 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5812 pthread_mutex_unlock(&mMutex);
5813 return BAD_VALUE;
5814 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005815
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005816 if (request->input_buffer == NULL) {
5817 /* Set the parameters to backend:
5818 * - For every request in NORMAL MODE
5819 * - For every request in HFR mode during preview only case
5820 * - Once every batch in HFR mode during video recording
5821 */
5822 if (!mBatchSize ||
5823 (mBatchSize && !isVidBufRequested) ||
5824 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5825 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5826 mBatchSize, isVidBufRequested,
5827 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005828
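// In HFR batch mode the per-request stream lists are merged into
// mBatchedStreamsArray and sent to the backend once per batch (here and in
// the else-if branch further down), instead of once per request.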
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005829 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5830 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5831 uint32_t m = 0;
5832 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5833 if (streamsArray.stream_request[k].streamID ==
5834 mBatchedStreamsArray.stream_request[m].streamID)
5835 break;
5836 }
5837 if (m == mBatchedStreamsArray.num_streams) {
5838 mBatchedStreamsArray.stream_request\
5839 [mBatchedStreamsArray.num_streams].streamID =
5840 streamsArray.stream_request[k].streamID;
5841 mBatchedStreamsArray.stream_request\
5842 [mBatchedStreamsArray.num_streams].buf_index =
5843 streamsArray.stream_request[k].buf_index;
5844 mBatchedStreamsArray.num_streams =
5845 mBatchedStreamsArray.num_streams + 1;
5846 }
5847 }
5848 streamsArray = mBatchedStreamsArray;
5849 }
5850 /* Update stream id of all the requested buffers */
5851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5852 streamsArray)) {
5853 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005854 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005855 return BAD_VALUE;
5856 }
5857
5858 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5859 mParameters);
5860 if (rc < 0) {
5861 LOGE("set_parms failed");
5862 }
5863 /* Reset to zero because the batch has been queued */
5864 mToBeQueuedVidBufs = 0;
5865 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5866 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5867 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005868 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5869 uint32_t m = 0;
5870 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5871 if (streamsArray.stream_request[k].streamID ==
5872 mBatchedStreamsArray.stream_request[m].streamID)
5873 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005874 }
5875 if (m == mBatchedStreamsArray.num_streams) {
5876 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5877 streamID = streamsArray.stream_request[k].streamID;
5878 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5879 buf_index = streamsArray.stream_request[k].buf_index;
5880 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5881 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005882 }
5883 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005884 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005885
5886 // Start all streams after the first setting is sent, so that the
5887 // setting can be applied sooner: (0 + apply_delay)th frame.
5888 if (mState == CONFIGURED && mChannelHandle) {
5889 // Start the metadata channel first.
5890 LOGH("Start META Channel");
5891 rc = mMetadataChannel->start();
5892 if (rc < 0) {
5893 LOGE("META channel start failed");
5894 pthread_mutex_unlock(&mMutex);
5895 return rc;
5896 }
5897
5898 if (mAnalysisChannel) {
5899 rc = mAnalysisChannel->start();
5900 if (rc < 0) {
5901 LOGE("Analysis channel start failed");
5902 mMetadataChannel->stop();
5903 pthread_mutex_unlock(&mMutex);
5904 return rc;
5905 }
5906 }
5907
5908 if (mSupportChannel) {
5909 rc = mSupportChannel->start();
5910 if (rc < 0) {
5911 LOGE("Support channel start failed");
5912 mMetadataChannel->stop();
5913 /* Although support and analysis are mutually exclusive today,
5914 handle it here in any case for future-proofing */
5915 if (mAnalysisChannel) {
5916 mAnalysisChannel->stop();
5917 }
5918 pthread_mutex_unlock(&mMutex);
5919 return rc;
5920 }
5921 }
5922 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5923 it != mStreamInfo.end(); it++) {
5924 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5925 LOGH("Start Processing Channel mask=%d",
5926 channel->getStreamTypeMask());
5927 rc = channel->start();
5928 if (rc < 0) {
5929 LOGE("channel start failed");
5930 pthread_mutex_unlock(&mMutex);
5931 return rc;
5932 }
5933 }
5934
5935 if (mRawDumpChannel) {
5936 LOGD("Starting raw dump stream");
5937 rc = mRawDumpChannel->start();
5938 if (rc != NO_ERROR) {
5939 LOGE("Error Starting Raw Dump Channel");
5940 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5941 it != mStreamInfo.end(); it++) {
5942 QCamera3Channel *channel =
5943 (QCamera3Channel *)(*it)->stream->priv;
5944 LOGH("Stopping Processing Channel mask=%d",
5945 channel->getStreamTypeMask());
5946 channel->stop();
5947 }
5948 if (mSupportChannel)
5949 mSupportChannel->stop();
5950 if (mAnalysisChannel) {
5951 mAnalysisChannel->stop();
5952 }
5953 mMetadataChannel->stop();
5954 pthread_mutex_unlock(&mMutex);
5955 return rc;
5956 }
5957 }
5958
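// Stream-on happens in stages: first the backend channel is started without
// sensor streaming, then Easel/MIPI is configured for the selected sensor
// mode, and only then is sensor streaming started, so that the first
// request's settings are applied from the very first frames.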
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005959 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005960 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005961 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005962 if (rc != NO_ERROR) {
5963 LOGE("start_channel failed %d", rc);
5964 pthread_mutex_unlock(&mMutex);
5965 return rc;
5966 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005967
5968 {
5969 // Configure Easel for stream on.
5970 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005971
5972 // Now that sensor mode should have been selected, get the selected sensor mode
5973 // info.
5974 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5975 getCurrentSensorModeInfo(mSensorModeInfo);
5976
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005977 if (EaselManagerClientOpened) {
5978 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07005979 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5980 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005981 if (rc != OK) {
5982 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5983 mCameraId, mSensorModeInfo.op_pixel_clk);
5984 pthread_mutex_unlock(&mMutex);
5985 return rc;
5986 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005987 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005988 }
5989 }
5990
5991 // Start sensor streaming.
5992 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5993 mChannelHandle);
5994 if (rc != NO_ERROR) {
5995 LOGE("start_sensor_stream_on failed %d", rc);
5996 pthread_mutex_unlock(&mMutex);
5997 return rc;
5998 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006000 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006001 }
6002
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006003 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07006004 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006005 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006006 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006007 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6008 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6009 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6010 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6011 rc = enableHdrPlusModeLocked();
6012 if (rc != OK) {
6013 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6014 pthread_mutex_unlock(&mMutex);
6015 return rc;
6016 }
6017
6018 mFirstPreviewIntentSeen = true;
6019 }
6020 }
6021
Thierry Strudel3d639192016-09-09 11:52:26 -07006022 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6023
6024 mState = STARTED;
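// Flow control: block this call while the number of in-flight requests is at
// or above mMinInFlightRequests (unless the request carries an input buffer),
// using a timed wait of about 5 s, extended to
// MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT while an HDR+ request is still pending.
// The wait is released when results arrive or the daemon wakes us up.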
6025 // Added a timed condition wait
6026 struct timespec ts;
6027 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006028 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006029 if (rc < 0) {
6030 isValidTimeout = 0;
6031 LOGE("Error reading the real time clock!!");
6032 }
6033 else {
6034 // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006035 int64_t timeout = 5;
6036 {
6037 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6038 // If there is a pending HDR+ request, the following requests may be blocked until the
6039 // HDR+ request is done. So allow a longer timeout.
6040 if (mHdrPlusPendingRequests.size() > 0) {
6041 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6042 }
6043 }
6044 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006045 }
6046 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006047 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006048 (mState != ERROR) && (mState != DEINIT)) {
6049 if (!isValidTimeout) {
6050 LOGD("Blocking on conditional wait");
6051 pthread_cond_wait(&mRequestCond, &mMutex);
6052 }
6053 else {
6054 LOGD("Blocking on timed conditional wait");
6055 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6056 if (rc == ETIMEDOUT) {
6057 rc = -ENODEV;
6058 LOGE("Unblocked on timeout!!!!");
6059 break;
6060 }
6061 }
6062 LOGD("Unblocked");
6063 if (mWokenUpByDaemon) {
6064 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006065 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006066 break;
6067 }
6068 }
6069 pthread_mutex_unlock(&mMutex);
6070
6071 return rc;
6072}
6073
6074/*===========================================================================
6075 * FUNCTION : dump
6076 *
6077 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and the
6078 * pending frame drop list) to the given file descriptor
6079 * PARAMETERS :
6080 * @fd : file descriptor to write the dump to
6081 *
6082 * RETURN : None
6083 *==========================================================================*/
6084void QCamera3HardwareInterface::dump(int fd)
6085{
6086 pthread_mutex_lock(&mMutex);
6087 dprintf(fd, "\n Camera HAL3 information Begin \n");
6088
6089 dprintf(fd, "\nNumber of pending requests: %zu \n",
6090 mPendingRequestsList.size());
6091 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6092 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6093 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6094 for(pendingRequestIterator i = mPendingRequestsList.begin();
6095 i != mPendingRequestsList.end(); i++) {
6096 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6097 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6098 i->input_buffer);
6099 }
6100 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6101 mPendingBuffersMap.get_num_overall_buffers());
6102 dprintf(fd, "-------+------------------\n");
6103 dprintf(fd, " Frame | Stream type mask \n");
6104 dprintf(fd, "-------+------------------\n");
6105 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6106 for(auto &j : req.mPendingBufferList) {
6107 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6108 dprintf(fd, " %5d | %11d \n",
6109 req.frame_number, channel->getStreamTypeMask());
6110 }
6111 }
6112 dprintf(fd, "-------+------------------\n");
6113
6114 dprintf(fd, "\nPending frame drop list: %zu\n",
6115 mPendingFrameDropList.size());
6116 dprintf(fd, "-------+-----------\n");
6117 dprintf(fd, " Frame | Stream ID \n");
6118 dprintf(fd, "-------+-----------\n");
6119 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6120 i != mPendingFrameDropList.end(); i++) {
6121 dprintf(fd, " %5d | %9d \n",
6122 i->frame_number, i->stream_ID);
6123 }
6124 dprintf(fd, "-------+-----------\n");
6125
6126 dprintf(fd, "\n Camera HAL3 information End \n");
6127
6128 /* use dumpsys media.camera as trigger to send update debug level event */
6129 mUpdateDebugLevel = true;
6130 pthread_mutex_unlock(&mMutex);
6131 return;
6132}
6133
6134/*===========================================================================
6135 * FUNCTION : flush
6136 *
6137 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6138 * conditionally restarts channels
6139 *
6140 * PARAMETERS :
6141 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006142 * @ stopChannelImmediately: stop the channel immediately. This should be used
6143 * when the device has encountered an error and MIPI
6144 * may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006145 *
6146 * RETURN :
6147 * 0 on success
6148 * Error code on failure
6149 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006150int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006151{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006152 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006153 int32_t rc = NO_ERROR;
6154
6155 LOGD("Unblocking Process Capture Request");
6156 pthread_mutex_lock(&mMutex);
6157 mFlush = true;
6158 pthread_mutex_unlock(&mMutex);
6159
6160 rc = stopAllChannels();
6161 // unlink of dualcam
6162 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006163 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6164 &m_pDualCamCmdPtr->bundle_info;
6165 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006166 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6167 pthread_mutex_lock(&gCamLock);
6168
6169 if (mIsMainCamera == 1) {
6170 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6171 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006172 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006173 // related session id should be session id of linked session
6174 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6175 } else {
6176 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6177 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006178 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006179 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6180 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006181 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006182 pthread_mutex_unlock(&gCamLock);
6183
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006184 rc = mCameraHandle->ops->set_dual_cam_cmd(
6185 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006186 if (rc < 0) {
6187 LOGE("Dualcam: Unlink failed, but still proceed to close");
6188 }
6189 }
6190
6191 if (rc < 0) {
6192 LOGE("stopAllChannels failed");
6193 return rc;
6194 }
6195 if (mChannelHandle) {
6196 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006197 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006198 }
6199
6200 // Reset bundle info
6201 rc = setBundleInfo();
6202 if (rc < 0) {
6203 LOGE("setBundleInfo failed %d", rc);
6204 return rc;
6205 }
6206
6207 // Mutex Lock
6208 pthread_mutex_lock(&mMutex);
6209
6210 // Unblock process_capture_request
6211 mPendingLiveRequest = 0;
6212 pthread_cond_signal(&mRequestCond);
6213
6214 rc = notifyErrorForPendingRequests();
6215 if (rc < 0) {
6216 LOGE("notifyErrorForPendingRequests failed");
6217 pthread_mutex_unlock(&mMutex);
6218 return rc;
6219 }
6220
6221 mFlush = false;
6222
6223 // Start the Streams/Channels
6224 if (restartChannels) {
6225 rc = startAllChannels();
6226 if (rc < 0) {
6227 LOGE("startAllChannels failed");
6228 pthread_mutex_unlock(&mMutex);
6229 return rc;
6230 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006231 if (mChannelHandle) {
6232 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006233 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006234 if (rc < 0) {
6235 LOGE("start_channel failed");
6236 pthread_mutex_unlock(&mMutex);
6237 return rc;
6238 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006239 }
6240 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006241 pthread_mutex_unlock(&mMutex);
6242
6243 return 0;
6244}
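// Note: besides the framework-initiated flush, this path is also used
// internally, e.g. handleCameraDeviceError() calls
// flush(false /* restart channels */, stopChannelImmediately) before moving
// the HAL to the DEINIT state.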
6245
6246/*===========================================================================
6247 * FUNCTION : flushPerf
6248 *
6249 * DESCRIPTION: This is the performance-optimized version of flush that does
6250 * not use stream off; instead it flushes the system
6251 *
6252 * PARAMETERS :
6253 *
6254 *
6255 * RETURN : 0 : success
6256 * -EINVAL: input is malformed (device is not valid)
6257 * -ENODEV: if the device has encountered a serious error
6258 *==========================================================================*/
6259int QCamera3HardwareInterface::flushPerf()
6260{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006261 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006262 int32_t rc = 0;
6263 struct timespec timeout;
6264 bool timed_wait = false;
6265
6266 pthread_mutex_lock(&mMutex);
6267 mFlushPerf = true;
6268 mPendingBuffersMap.numPendingBufsAtFlush =
6269 mPendingBuffersMap.get_num_overall_buffers();
6270 LOGD("Calling flush. Wait for %d buffers to return",
6271 mPendingBuffersMap.numPendingBufsAtFlush);
6272
6273 /* send the flush event to the backend */
6274 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6275 if (rc < 0) {
6276 LOGE("Error in flush: IOCTL failure");
6277 mFlushPerf = false;
6278 pthread_mutex_unlock(&mMutex);
6279 return -ENODEV;
6280 }
6281
6282 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6283 LOGD("No pending buffers in HAL, return flush");
6284 mFlushPerf = false;
6285 pthread_mutex_unlock(&mMutex);
6286 return rc;
6287 }
6288
6289 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006290 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006291 if (rc < 0) {
6292 LOGE("Error reading the real time clock, cannot use timed wait");
6293 } else {
6294 timeout.tv_sec += FLUSH_TIMEOUT;
6295 timed_wait = true;
6296 }
6297
6298 //Block on conditional variable
6299 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6300 LOGD("Waiting on mBuffersCond");
6301 if (!timed_wait) {
6302 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6303 if (rc != 0) {
6304 LOGE("pthread_cond_wait failed due to rc = %s",
6305 strerror(rc));
6306 break;
6307 }
6308 } else {
6309 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6310 if (rc != 0) {
6311 LOGE("pthread_cond_timedwait failed due to rc = %s",
6312 strerror(rc));
6313 break;
6314 }
6315 }
6316 }
6317 if (rc != 0) {
6318 mFlushPerf = false;
6319 pthread_mutex_unlock(&mMutex);
6320 return -ENODEV;
6321 }
6322
6323 LOGD("Received buffers, now safe to return them");
6324
6325 //make sure the channels handle flush
6326 //currently only required for the picture channel to release snapshot resources
6327 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6328 it != mStreamInfo.end(); it++) {
6329 QCamera3Channel *channel = (*it)->channel;
6330 if (channel) {
6331 rc = channel->flush();
6332 if (rc) {
6333 LOGE("Flushing the channels failed with error %d", rc);
6334 // Even though the channel flush failed, we need to continue and
6335 // return the buffers we have to the framework; however, the return
6336 // value will be an error
6337 rc = -ENODEV;
6338 }
6339 }
6340 }
6341
6342 /* notify the frameworks and send errored results */
6343 rc = notifyErrorForPendingRequests();
6344 if (rc < 0) {
6345 LOGE("notifyErrorForPendingRequests failed");
6346 pthread_mutex_unlock(&mMutex);
6347 return rc;
6348 }
6349
6350 //unblock process_capture_request
6351 mPendingLiveRequest = 0;
6352 unblockRequestIfNecessary();
6353
6354 mFlushPerf = false;
6355 pthread_mutex_unlock(&mMutex);
6356 LOGD ("Flush Operation complete. rc = %d", rc);
6357 return rc;
6358}
6359
6360/*===========================================================================
6361 * FUNCTION : handleCameraDeviceError
6362 *
6363 * DESCRIPTION: This function performs an internal flush, notifies the error
6364 * to the framework and updates the state variable.
6365 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006366 * PARAMETERS :
6367 * @stopChannelImmediately : stop channels immediately without waiting for
6368 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006369 *
6370 * RETURN : NO_ERROR on Success
6371 * Error code on failure
6372 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006373int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006374{
6375 int32_t rc = NO_ERROR;
6376
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006377 {
6378 Mutex::Autolock lock(mFlushLock);
6379 pthread_mutex_lock(&mMutex);
6380 if (mState != ERROR) {
6381 //if mState != ERROR, nothing to be done
6382 pthread_mutex_unlock(&mMutex);
6383 return NO_ERROR;
6384 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006385 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006386
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006387 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006388 if (NO_ERROR != rc) {
6389 LOGE("internal flush to handle mState = ERROR failed");
6390 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006391
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006392 pthread_mutex_lock(&mMutex);
6393 mState = DEINIT;
6394 pthread_mutex_unlock(&mMutex);
6395 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006396
6397 camera3_notify_msg_t notify_msg;
6398 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6399 notify_msg.type = CAMERA3_MSG_ERROR;
6400 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6401 notify_msg.message.error.error_stream = NULL;
6402 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006403 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006404
6405 return rc;
6406}
6407
6408/*===========================================================================
6409 * FUNCTION : captureResultCb
6410 *
6411 * DESCRIPTION: Callback handler for all capture result
6412 * (streams, as well as metadata)
6413 *
6414 * PARAMETERS :
6415 * @metadata : metadata information
6416 * @buffer : actual gralloc buffer to be returned to frameworks.
6417 * NULL if metadata.
6418 *
6419 * RETURN : NONE
6420 *==========================================================================*/
6421void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6422 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6423{
6424 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006425 pthread_mutex_lock(&mMutex);
6426 uint8_t batchSize = mBatchSize;
6427 pthread_mutex_unlock(&mMutex);
6428 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006429 handleBatchMetadata(metadata_buf,
6430 true /* free_and_bufdone_meta_buf */);
6431 } else { /* mBatchSize = 0 */
6432 hdrPlusPerfLock(metadata_buf);
6433 pthread_mutex_lock(&mMutex);
6434 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006435 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006436 true /* last urgent frame of batch metadata */,
6437 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006438 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006439 pthread_mutex_unlock(&mMutex);
6440 }
6441 } else if (isInputBuffer) {
6442 pthread_mutex_lock(&mMutex);
6443 handleInputBufferWithLock(frame_number);
6444 pthread_mutex_unlock(&mMutex);
6445 } else {
6446 pthread_mutex_lock(&mMutex);
6447 handleBufferWithLock(buffer, frame_number);
6448 pthread_mutex_unlock(&mMutex);
6449 }
6450 return;
6451}
6452
6453/*===========================================================================
6454 * FUNCTION : getReprocessibleOutputStreamId
6455 *
6456 * DESCRIPTION: Get source output stream id for the input reprocess stream
6457 * based on size and format, which would be the largest
6458 * output stream if an input stream exists.
6459 *
6460 * PARAMETERS :
6461 * @id : return the stream id if found
6462 *
6463 * RETURN : int32_t type of status
6464 * NO_ERROR -- success
6465 * none-zero failure code
6466 *==========================================================================*/
6467int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6468{
6469 /* Check if any output or bidirectional stream with the same size and format
6470 exists and, if so, return that stream's id */
6471 if ((mInputStreamInfo.dim.width > 0) &&
6472 (mInputStreamInfo.dim.height > 0)) {
6473 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6474 it != mStreamInfo.end(); it++) {
6475
6476 camera3_stream_t *stream = (*it)->stream;
6477 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6478 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6479 (stream->format == mInputStreamInfo.format)) {
6480 // Usage flag for an input stream and the source output stream
6481 // may be different.
6482 LOGD("Found reprocessible output stream! %p", *it);
6483 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6484 stream->usage, mInputStreamInfo.usage);
6485
6486 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6487 if (channel != NULL && channel->mStreams[0]) {
6488 id = channel->mStreams[0]->getMyServerID();
6489 return NO_ERROR;
6490 }
6491 }
6492 }
6493 } else {
6494 LOGD("No input stream, so no reprocessible output stream");
6495 }
6496 return NAME_NOT_FOUND;
6497}
6498
6499/*===========================================================================
6500 * FUNCTION : lookupFwkName
6501 *
6502 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6503 * make sure the parameter is correctly propagated
6504 *
6505 * PARAMETERS :
6506 * @arr : map between the two enums
6507 * @len : len of the map
6508 * @hal_name : name of the hal_parm to map
6509 *
6510 * RETURN : int type of status
6511 * fwk_name -- success
6512 * none-zero failure code
6513 *==========================================================================*/
6514template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6515 size_t len, halType hal_name)
6516{
6517
6518 for (size_t i = 0; i < len; i++) {
6519 if (arr[i].hal_name == hal_name) {
6520 return arr[i].fwk_name;
6521 }
6522 }
6523
6524 /* Not being able to find a matching framework type is not necessarily
6525 * an error case. This happens when mm-camera supports more attributes
6526 * than the framework does */
6527 LOGH("Cannot find matching framework type");
6528 return NAME_NOT_FOUND;
6529}
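/*
 * Illustrative usage sketch (assumed, not taken from this file): given a map
 * whose entries expose fwk_name / hal_name members, e.g.
 *
 *   struct EffectModeMap {
 *       uint8_t fwk_name;                  // ANDROID_CONTROL_EFFECT_MODE_*
 *       cam_effect_mode_type hal_name;     // CAM_EFFECT_MODE_*
 *   };
 *   static const EffectModeMap kEffectMap[] = { ... };
 *
 * a backend enum can be translated for the framework with:
 *
 *   int fwkEffect = lookupFwkName(kEffectMap,
 *           sizeof(kEffectMap) / sizeof(kEffectMap[0]), halEffect);
 *   if (fwkEffect != NAME_NOT_FOUND) {
 *       // publish fwkEffect in the result metadata
 *   }
 */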
6530
6531/*===========================================================================
6532 * FUNCTION : lookupHalName
6533 *
6534 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
6535 * make sure the parameter is correctly propagated
6536 *
6537 * PARAMETERS :
6538 * @arr : map between the two enums
6539 * @len : len of the map
6540 * @fwk_name : name of the framework parameter to map
6541 *
6542 * RETURN : int32_t type of status
6543 * hal_name -- success
6544 * NAME_NOT_FOUND -- failure
6545 *==========================================================================*/
6546template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6547 size_t len, fwkType fwk_name)
6548{
6549 for (size_t i = 0; i < len; i++) {
6550 if (arr[i].fwk_name == fwk_name) {
6551 return arr[i].hal_name;
6552 }
6553 }
6554
6555 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6556 return NAME_NOT_FOUND;
6557}
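// Illustrative usage sketch (not part of the original file): lookupHalName() is
// the inverse mapping, used when translating a framework setting into the backend
// enum. FLASH_MODES_MAP is the same table used for the forward lookup; the
// fwk_flashMode variable here is hypothetical.
//
//     int val = lookupHalName(FLASH_MODES_MAP,
//             METADATA_MAP_SIZE(FLASH_MODES_MAP), fwk_flashMode);
//     if (NAME_NOT_FOUND != val) {
//         // val is the corresponding backend flash mode value
//     }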
6558
6559/*===========================================================================
6560 * FUNCTION : lookupProp
6561 *
6562 * DESCRIPTION: lookup a value by its name
6563 *
6564 * PARAMETERS :
6565 * @arr : map between the two enums
6566 * @len : size of the map
6567 * @name : name to be looked up
6568 *
6569 * RETURN : Value if found
6570 * CAM_CDS_MODE_MAX if not found
6571 *==========================================================================*/
6572template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6573 size_t len, const char *name)
6574{
6575 if (name) {
6576 for (size_t i = 0; i < len; i++) {
6577 if (!strcmp(arr[i].desc, name)) {
6578 return arr[i].val;
6579 }
6580 }
6581 }
6582 return CAM_CDS_MODE_MAX;
6583}
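// Illustrative usage sketch (not part of the original file): lookupProp() maps a
// string read from a system property onto its CDS enum value. The map name
// CDS_MAP and the property key below are assumptions for illustration only.
//
//     char prop[PROPERTY_VALUE_MAX];
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds_mode = lookupProp(CDS_MAP,
//             METADATA_MAP_SIZE(CDS_MAP), prop);
//     if (CAM_CDS_MODE_MAX == cds_mode) {
//         // the string did not match any map entry; fall back to a default
//     }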
6584
6585/*===========================================================================
6586 * FUNCTION : translateFromHalMetadata
 *
6587 * DESCRIPTION: Translate the metadata reported by the HAL/backend into the
 * camera_metadata_t format expected by the camera framework
6588 *
6589 * PARAMETERS :
6590 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006591 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006592 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006593 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6594 * in a batch. Always true for non-batch mode.
 * @enableZsl: pointer to the requested ZSL enable setting, if provided
Thierry Strudel3d639192016-09-09 11:52:26 -07006595 *
6596 * RETURN : camera_metadata_t*
6597 * metadata in a format specified by fwk
6598 *==========================================================================*/
6599camera_metadata_t*
6600QCamera3HardwareInterface::translateFromHalMetadata(
6601 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006602 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006603 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006604 bool lastMetadataInBatch,
6605 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006606{
6607 CameraMetadata camMetadata;
6608 camera_metadata_t *resultMetadata;
6609
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006610 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006611 /* In batch mode, if this is not the last metadata in the batch, populate only
6612 * SENSOR_TIMESTAMP. The timestamp is needed for the shutter notification calculation.
6613 */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006614 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006615 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006616 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006617 }
6618
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006619 if (pendingRequest.jpegMetadata.entryCount())
6620 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006621
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006622 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6623 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6624 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6625 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6626 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006627 if (mBatchSize == 0) {
6628 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006629 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006630 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006631
Samuel Ha68ba5172016-12-15 18:41:12 -08006632 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6633 // Only update DevCamDebug metadata conditionally: non-HFR mode and only when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006634 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006635 // DevCamDebug metadata translateFromHalMetadata AF
6636 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6637 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6638 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6639 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6642 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6643 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6644 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6647 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6648 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6649 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6650 }
6651 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6652 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6653 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6654 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6655 }
6656 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6657 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6658 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6659 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6660 }
6661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6662 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6663 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6664 *DevCamDebug_af_monitor_pdaf_target_pos;
6665 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6666 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6667 }
6668 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6669 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6670 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6671 *DevCamDebug_af_monitor_pdaf_confidence;
6672 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6673 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6674 }
6675 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6676 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6677 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6678 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6679 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6682 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6683 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6684 *DevCamDebug_af_monitor_tof_target_pos;
6685 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6686 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6687 }
6688 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6689 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6690 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6691 *DevCamDebug_af_monitor_tof_confidence;
6692 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6693 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6694 }
6695 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6696 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6697 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6698 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6699 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6700 }
6701 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6702 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6703 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6704 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6705 &fwk_DevCamDebug_af_monitor_type_select, 1);
6706 }
6707 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6708 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6709 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6710 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6711 &fwk_DevCamDebug_af_monitor_refocus, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6714 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6715 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6716 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6717 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6720 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6721 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6722 *DevCamDebug_af_search_pdaf_target_pos;
6723 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6724 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6725 }
6726 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6727 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6728 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6729 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6730 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6731 }
6732 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6733 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6734 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6735 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6736 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6737 }
6738 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6739 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6740 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6741 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6742 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6743 }
6744 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6745 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6746 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6747 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6748 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6751 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6752 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6753 *DevCamDebug_af_search_tof_target_pos;
6754 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6755 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6756 }
6757 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6758 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6759 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6760 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6761 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6762 }
6763 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6764 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6765 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6766 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6767 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6768 }
6769 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6770 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6771 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6772 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6773 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6774 }
6775 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6776 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6777 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6778 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6779 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6780 }
6781 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6782 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6783 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6784 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6785 &fwk_DevCamDebug_af_search_type_select, 1);
6786 }
6787 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6788 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6789 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6790 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6791 &fwk_DevCamDebug_af_search_next_pos, 1);
6792 }
6793 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6794 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6795 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6796 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6797 &fwk_DevCamDebug_af_search_target_pos, 1);
6798 }
6799 // DevCamDebug metadata translateFromHalMetadata AEC
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6801 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6802 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6803 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6804 }
6805 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6806 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6807 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6808 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6809 }
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6811 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6812 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6813 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6816 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6817 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6818 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6819 }
6820 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6821 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6822 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6823 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6824 }
6825 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6826 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6827 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6828 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6831 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6832 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6833 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6834 }
6835 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6836 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6837 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6838 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6839 }
Samuel Ha34229982017-02-17 13:51:11 -08006840 // DevCamDebug metadata translateFromHalMetadata zzHDR
6841 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6842 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6843 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6844 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6845 }
6846 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6847 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006848 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006849 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6850 }
6851 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6852 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6853 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6854 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6855 }
6856 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6857 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006858 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006859 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6860 }
6861 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6862 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6863 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6864 *DevCamDebug_aec_hdr_sensitivity_ratio;
6865 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6866 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6867 }
6868 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6869 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6870 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6871 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6872 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6873 }
6874 // DevCamDebug metadata translateFromHalMetadata ADRC
6875 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6876 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6877 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6878 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6879 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6880 }
6881 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6882 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6883 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6884 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6885 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6886 }
6887 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6888 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6889 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6890 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6891 }
6892 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6893 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6894 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6895 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6896 }
6897 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6898 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6899 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6900 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6901 }
6902 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6903 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6904 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6905 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6906 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006907 // DevCamDebug metadata translateFromHalMetadata AWB
6908 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6909 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6910 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6911 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6912 }
6913 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6914 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6915 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6916 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6917 }
6918 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6919 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6920 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6921 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6922 }
6923 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6924 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6925 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6926 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6927 }
6928 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6929 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6930 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6931 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6932 }
6933 }
6934 // atrace_end(ATRACE_TAG_ALWAYS);
6935
Thierry Strudel3d639192016-09-09 11:52:26 -07006936 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6937 int64_t fwk_frame_number = *frame_number;
6938 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6939 }
6940
6941 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6942 int32_t fps_range[2];
6943 fps_range[0] = (int32_t)float_range->min_fps;
6944 fps_range[1] = (int32_t)float_range->max_fps;
6945 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6946 fps_range, 2);
6947 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6948 fps_range[0], fps_range[1]);
6949 }
6950
6951 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6952 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6953 }
6954
6955 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6956 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6957 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6958 *sceneMode);
6959 if (NAME_NOT_FOUND != val) {
6960 uint8_t fwkSceneMode = (uint8_t)val;
6961 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6962 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6963 fwkSceneMode);
6964 }
6965 }
6966
6967 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6968 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6969 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6970 }
6971
6972 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6973 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6974 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6975 }
6976
6977 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6978 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6979 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6980 }
6981
6982 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6983 CAM_INTF_META_EDGE_MODE, metadata) {
6984 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6985 }
6986
6987 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6988 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6989 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6990 }
6991
6992 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6993 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6994 }
6995
6996 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6997 if (0 <= *flashState) {
6998 uint8_t fwk_flashState = (uint8_t) *flashState;
6999 if (!gCamCapability[mCameraId]->flash_available) {
7000 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7001 }
7002 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7003 }
7004 }
7005
7006 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7007 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7008 if (NAME_NOT_FOUND != val) {
7009 uint8_t fwk_flashMode = (uint8_t)val;
7010 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7011 }
7012 }
7013
7014 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7015 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7016 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7017 }
7018
7019 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7020 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7021 }
7022
7023 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7024 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7025 }
7026
7027 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7028 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7029 }
7030
7031 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7032 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7033 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7034 }
7035
7036 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7037 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7038 LOGD("fwk_videoStab = %d", fwk_videoStab);
7039 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7040 } else {
7041 // Regardless of whether video stabilization is supported, CTS expects the EIS result
7042 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7043 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7044 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007045 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007046 }
7047
7048 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7049 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7050 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7051 }
7052
7053 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7054 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7055 }
7056
Thierry Strudel3d639192016-09-09 11:52:26 -07007057 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7058 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007059 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007060
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007061 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7062 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007063
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007064 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007065 blackLevelAppliedPattern->cam_black_level[0],
7066 blackLevelAppliedPattern->cam_black_level[1],
7067 blackLevelAppliedPattern->cam_black_level[2],
7068 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007069 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7070 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007071
7072#ifndef USE_HAL_3_3
7073 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307074 // Need to convert the internal 14-bit black level values to the sensor's
Zhijun Heb753c672016-06-15 14:50:48 -07007075 // 10-bit raw depth space.
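// Dividing by 16 (i.e. 2^(14-10)) rescales the values; for example, a black
// level of 1024 in the 14-bit space becomes 1024 / 16 = 64 in the 10-bit space.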
Jason Lee4f3d96e2017-02-28 19:24:14 +05307076 fwk_blackLevelInd[0] /= 16.0;
7077 fwk_blackLevelInd[1] /= 16.0;
7078 fwk_blackLevelInd[2] /= 16.0;
7079 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007080 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7081 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007082#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007083 }
7084
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007085#ifndef USE_HAL_3_3
7086 // Fixed whitelevel is used by ISP/Sensor
7087 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7088 &gCamCapability[mCameraId]->white_level, 1);
7089#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007090
7091 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7092 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7093 int32_t scalerCropRegion[4];
7094 scalerCropRegion[0] = hScalerCropRegion->left;
7095 scalerCropRegion[1] = hScalerCropRegion->top;
7096 scalerCropRegion[2] = hScalerCropRegion->width;
7097 scalerCropRegion[3] = hScalerCropRegion->height;
7098
7099 // Adjust crop region from sensor output coordinate system to active
7100 // array coordinate system.
7101 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7102 scalerCropRegion[2], scalerCropRegion[3]);
7103
7104 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7105 }
7106
7107 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7108 LOGD("sensorExpTime = %lld", *sensorExpTime);
7109 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7110 }
7111
7112 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7113 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7114 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7115 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7116 }
7117
7118 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7119 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7120 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7121 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7122 sensorRollingShutterSkew, 1);
7123 }
7124
7125 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7126 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7127 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7128
7129 //calculate the noise profile based on sensitivity
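// The (S, O) pair computed below follows the linear sensor noise model used by
// ANDROID_SENSOR_NOISE_PROFILE, where the noise variance of a pixel value x is
// approximately S * x + O; one (S, O) pair is reported per color channel.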
7130 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7131 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7132 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7133 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7134 noise_profile[i] = noise_profile_S;
7135 noise_profile[i+1] = noise_profile_O;
7136 }
7137 LOGD("noise model entry (S, O) is (%f, %f)",
7138 noise_profile_S, noise_profile_O);
7139 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7140 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7141 }
7142
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007143#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007144 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007145 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007146 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007147 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007148 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7149 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7150 }
7151 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007152#endif
7153
Thierry Strudel3d639192016-09-09 11:52:26 -07007154 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7155 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7156 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7157 }
7158
7159 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7160 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7161 *faceDetectMode);
7162 if (NAME_NOT_FOUND != val) {
7163 uint8_t fwk_faceDetectMode = (uint8_t)val;
7164 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7165
7166 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7167 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7168 CAM_INTF_META_FACE_DETECTION, metadata) {
7169 uint8_t numFaces = MIN(
7170 faceDetectionInfo->num_faces_detected, MAX_ROI);
7171 int32_t faceIds[MAX_ROI];
7172 uint8_t faceScores[MAX_ROI];
7173 int32_t faceRectangles[MAX_ROI * 4];
7174 int32_t faceLandmarks[MAX_ROI * 6];
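// Each face contributes six landmark values in the order expected by
// ANDROID_STATISTICS_FACE_LANDMARKS: left eye (x, y), right eye (x, y) and
// mouth (x, y); hence the MAX_ROI * 6 sizing here and the
// k += TOTAL_LANDMARK_INDICES stride used below.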
7175 size_t j = 0, k = 0;
7176
7177 for (size_t i = 0; i < numFaces; i++) {
7178 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7179 // Adjust the face boundary rectangle from the sensor output coordinate
7180 // system to the active array coordinate system.
7181 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7182 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7183 rect.width, rect.height);
7184
7185 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7186 faceRectangles+j, -1);
7187
Jason Lee8ce36fa2017-04-19 19:40:37 -07007188 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7189 "bottom-right (%d, %d)",
7190 faceDetectionInfo->frame_id, i,
7191 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7192 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7193
Thierry Strudel3d639192016-09-09 11:52:26 -07007194 j+= 4;
7195 }
7196 if (numFaces <= 0) {
7197 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7198 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7199 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7200 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7201 }
7202
7203 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7204 numFaces);
7205 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7206 faceRectangles, numFaces * 4U);
7207 if (fwk_faceDetectMode ==
7208 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7209 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7210 CAM_INTF_META_FACE_LANDMARK, metadata) {
7211
7212 for (size_t i = 0; i < numFaces; i++) {
7213 // Map the landmark coordinates from the sensor output coordinate system
7214 // to the active array coordinate system.
7215 mCropRegionMapper.toActiveArray(
7216 landmarks->face_landmarks[i].left_eye_center.x,
7217 landmarks->face_landmarks[i].left_eye_center.y);
7218 mCropRegionMapper.toActiveArray(
7219 landmarks->face_landmarks[i].right_eye_center.x,
7220 landmarks->face_landmarks[i].right_eye_center.y);
7221 mCropRegionMapper.toActiveArray(
7222 landmarks->face_landmarks[i].mouth_center.x,
7223 landmarks->face_landmarks[i].mouth_center.y);
7224
7225 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007226
7227 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7228 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7229 faceDetectionInfo->frame_id, i,
7230 faceLandmarks[k + LEFT_EYE_X],
7231 faceLandmarks[k + LEFT_EYE_Y],
7232 faceLandmarks[k + RIGHT_EYE_X],
7233 faceLandmarks[k + RIGHT_EYE_Y],
7234 faceLandmarks[k + MOUTH_X],
7235 faceLandmarks[k + MOUTH_Y]);
7236
Thierry Strudel04e026f2016-10-10 11:27:36 -07007237 k+= TOTAL_LANDMARK_INDICES;
7238 }
7239 } else {
7240 for (size_t i = 0; i < numFaces; i++) {
7241 setInvalidLandmarks(faceLandmarks+k);
7242 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007243 }
7244 }
7245
Jason Lee49619db2017-04-13 12:07:22 -07007246 for (size_t i = 0; i < numFaces; i++) {
7247 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7248
7249 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7250 faceDetectionInfo->frame_id, i, faceIds[i]);
7251 }
7252
Thierry Strudel3d639192016-09-09 11:52:26 -07007253 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7254 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7255 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007256 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007257 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7258 CAM_INTF_META_FACE_BLINK, metadata) {
7259 uint8_t detected[MAX_ROI];
7260 uint8_t degree[MAX_ROI * 2];
7261 for (size_t i = 0; i < numFaces; i++) {
7262 detected[i] = blinks->blink[i].blink_detected;
7263 degree[2 * i] = blinks->blink[i].left_blink;
7264 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007265
Jason Lee49619db2017-04-13 12:07:22 -07007266 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7267 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7268 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7269 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007270 }
7271 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7272 detected, numFaces);
7273 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7274 degree, numFaces * 2);
7275 }
7276 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7277 CAM_INTF_META_FACE_SMILE, metadata) {
7278 uint8_t degree[MAX_ROI];
7279 uint8_t confidence[MAX_ROI];
7280 for (size_t i = 0; i < numFaces; i++) {
7281 degree[i] = smiles->smile[i].smile_degree;
7282 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007283
Jason Lee49619db2017-04-13 12:07:22 -07007284 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7285 "smile_degree=%d, smile_score=%d",
7286 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007287 }
7288 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7289 degree, numFaces);
7290 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7291 confidence, numFaces);
7292 }
7293 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7294 CAM_INTF_META_FACE_GAZE, metadata) {
7295 int8_t angle[MAX_ROI];
7296 int32_t direction[MAX_ROI * 3];
7297 int8_t degree[MAX_ROI * 2];
7298 for (size_t i = 0; i < numFaces; i++) {
7299 angle[i] = gazes->gaze[i].gaze_angle;
7300 direction[3 * i] = gazes->gaze[i].updown_dir;
7301 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7302 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7303 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7304 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007305
7306 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7307 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7308 "left_right_gaze=%d, top_bottom_gaze=%d",
7309 faceDetectionInfo->frame_id, i, angle[i],
7310 direction[3 * i], direction[3 * i + 1],
7311 direction[3 * i + 2],
7312 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007313 }
7314 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7315 (uint8_t *)angle, numFaces);
7316 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7317 direction, numFaces * 3);
7318 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7319 (uint8_t *)degree, numFaces * 2);
7320 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007321 }
7322 }
7323 }
7324 }
7325
7326 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7327 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007328 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007329 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007330 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007331
Shuzhen Wang14415f52016-11-16 18:26:18 -08007332 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7333 histogramBins = *histBins;
7334 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7335 }
7336
7337 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007338 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7339 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007340 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007341
7342 switch (stats_data->type) {
7343 case CAM_HISTOGRAM_TYPE_BAYER:
7344 switch (stats_data->bayer_stats.data_type) {
7345 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007346 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7347 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007348 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007349 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7350 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007351 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007352 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7353 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007354 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007355 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007356 case CAM_STATS_CHANNEL_R:
7357 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007358 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7359 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007360 }
7361 break;
7362 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007363 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007364 break;
7365 }
7366
Shuzhen Wang14415f52016-11-16 18:26:18 -08007367 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007368 }
7369 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007370 }
7371
7372 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7373 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7374 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7375 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7376 }
7377
7378 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7379 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7380 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7381 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7382 }
7383
7384 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7385 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7386 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7387 CAM_MAX_SHADING_MAP_HEIGHT);
7388 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7389 CAM_MAX_SHADING_MAP_WIDTH);
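// The 4U factor below reflects that the lens shading map carries four gain
// values (one per Bayer color channel) for every point of the
// map_width x map_height grid.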
7390 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7391 lensShadingMap->lens_shading, 4U * map_width * map_height);
7392 }
7393
7394 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7395 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7396 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7397 }
7398
7399 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7400 //Populate CAM_INTF_META_TONEMAP_CURVES
7401 /* ch0 = G, ch 1 = B, ch 2 = R*/
7402 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7403 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7404 tonemap->tonemap_points_cnt,
7405 CAM_MAX_TONEMAP_CURVE_SIZE);
7406 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7407 }
7408
7409 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7410 &tonemap->curves[0].tonemap_points[0][0],
7411 tonemap->tonemap_points_cnt * 2);
7412
7413 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7414 &tonemap->curves[1].tonemap_points[0][0],
7415 tonemap->tonemap_points_cnt * 2);
7416
7417 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7418 &tonemap->curves[2].tonemap_points[0][0],
7419 tonemap->tonemap_points_cnt * 2);
7420 }
7421
7422 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7423 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7424 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7425 CC_GAIN_MAX);
7426 }
7427
7428 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7429 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7430 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7431 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7432 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7433 }
7434
7435 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7436 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7437 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7438 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7439 toneCurve->tonemap_points_cnt,
7440 CAM_MAX_TONEMAP_CURVE_SIZE);
7441 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7442 }
7443 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7444 (float*)toneCurve->curve.tonemap_points,
7445 toneCurve->tonemap_points_cnt * 2);
7446 }
7447
7448 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7449 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7450 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7451 predColorCorrectionGains->gains, 4);
7452 }
7453
7454 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7455 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7456 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7457 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7458 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7459 }
7460
7461 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7462 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7463 }
7464
7465 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7466 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7467 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7468 }
7469
7470 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7471 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7472 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7473 }
7474
7475 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7476 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7477 *effectMode);
7478 if (NAME_NOT_FOUND != val) {
7479 uint8_t fwk_effectMode = (uint8_t)val;
7480 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7481 }
7482 }
7483
7484 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7485 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7486 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7487 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7488 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7489 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7490 }
7491 int32_t fwk_testPatternData[4];
7492 fwk_testPatternData[0] = testPatternData->r;
7493 fwk_testPatternData[3] = testPatternData->b;
7494 switch (gCamCapability[mCameraId]->color_arrangement) {
7495 case CAM_FILTER_ARRANGEMENT_RGGB:
7496 case CAM_FILTER_ARRANGEMENT_GRBG:
7497 fwk_testPatternData[1] = testPatternData->gr;
7498 fwk_testPatternData[2] = testPatternData->gb;
7499 break;
7500 case CAM_FILTER_ARRANGEMENT_GBRG:
7501 case CAM_FILTER_ARRANGEMENT_BGGR:
7502 fwk_testPatternData[2] = testPatternData->gr;
7503 fwk_testPatternData[1] = testPatternData->gb;
7504 break;
7505 default:
7506 LOGE("color arrangement %d is not supported",
7507 gCamCapability[mCameraId]->color_arrangement);
7508 break;
7509 }
7510 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7511 }
7512
7513 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7514 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7515 }
7516
7517 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7518 String8 str((const char *)gps_methods);
7519 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7520 }
7521
7522 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7523 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7524 }
7525
7526 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7527 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7528 }
7529
7530 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7531 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7532 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7533 }
7534
7535 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7536 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7537 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7538 }
7539
7540 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7541 int32_t fwk_thumb_size[2];
7542 fwk_thumb_size[0] = thumb_size->width;
7543 fwk_thumb_size[1] = thumb_size->height;
7544 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7545 }
7546
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007547 // Skip reprocess metadata if there is no input stream.
7548 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7549 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7550 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7551 privateData,
7552 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7553 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007554 }
7555
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007556 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007557 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007558 meteringMode, 1);
7559 }
7560
Thierry Strudel54dc9782017-02-15 12:12:10 -08007561 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7562 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7563 LOGD("hdr_scene_data: %d %f\n",
7564 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7565 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7566 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7567 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7568 &isHdr, 1);
7569 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7570 &isHdrConfidence, 1);
7571 }
7572
7573
7574
Thierry Strudel3d639192016-09-09 11:52:26 -07007575 if (metadata->is_tuning_params_valid) {
7576 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7577 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7578 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7579
7580
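// Layout of tuning_meta_data_blob as serialized below: six uint32_t version/size
// fields followed by the variable-length data sections, i.e.
// [version][sensor_size][vfe_size][cpp_size][cac_size][mod3_size]
// [sensor data][vfe data][cpp data][cac data]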
7581 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7582 sizeof(uint32_t));
7583 data += sizeof(uint32_t);
7584
7585 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7586 sizeof(uint32_t));
7587 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7588 data += sizeof(uint32_t);
7589
7590 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7591 sizeof(uint32_t));
7592 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7593 data += sizeof(uint32_t);
7594
7595 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7596 sizeof(uint32_t));
7597 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7598 data += sizeof(uint32_t);
7599
7600 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7601 sizeof(uint32_t));
7602 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7603 data += sizeof(uint32_t);
7604
7605 metadata->tuning_params.tuning_mod3_data_size = 0;
7606 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7607 sizeof(uint32_t));
7608 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7609 data += sizeof(uint32_t);
7610
7611 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7612 TUNING_SENSOR_DATA_MAX);
7613 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7614 count);
7615 data += count;
7616
7617 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7618 TUNING_VFE_DATA_MAX);
7619 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7620 count);
7621 data += count;
7622
7623 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7624 TUNING_CPP_DATA_MAX);
7625 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7626 count);
7627 data += count;
7628
7629 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7630 TUNING_CAC_DATA_MAX);
7631 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7632 count);
7633 data += count;
7634
7635 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7636 (int32_t *)(void *)tuning_meta_data_blob,
7637 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7638 }
7639
7640 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7641 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7642 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7643 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7644 NEUTRAL_COL_POINTS);
7645 }
7646
7647 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7648 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7649 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7650 }
7651
7652 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7653 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7654 // Adjust crop region from sensor output coordinate system to active
7655 // array coordinate system.
7656 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7657 hAeRegions->rect.width, hAeRegions->rect.height);
7658
7659 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7660 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7661 REGIONS_TUPLE_COUNT);
7662 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7663 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7664 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7665 hAeRegions->rect.height);
7666 }
7667
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007668 if (!pendingRequest.focusStateSent) {
7669 if (pendingRequest.focusStateValid) {
7670 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7671 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007672 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007673 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7674 uint8_t fwk_afState = (uint8_t) *afState;
7675 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7676 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7677 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007678 }
7679 }
7680
Thierry Strudel3d639192016-09-09 11:52:26 -07007681 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7682 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7683 }
7684
7685 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7686 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7687 }
7688
7689 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7690 uint8_t fwk_lensState = *lensState;
7691 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7692 }
7693
Thierry Strudel3d639192016-09-09 11:52:26 -07007694 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007695 uint32_t ab_mode = *hal_ab_mode;
7696 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7697 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7698 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007700 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007701 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007702 if (NAME_NOT_FOUND != val) {
7703 uint8_t fwk_ab_mode = (uint8_t)val;
7704 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7705 }
7706 }
7707
7708 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7709 int val = lookupFwkName(SCENE_MODES_MAP,
7710 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7711 if (NAME_NOT_FOUND != val) {
7712 uint8_t fwkBestshotMode = (uint8_t)val;
7713 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7714 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7715 } else {
7716 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7717 }
7718 }
7719
7720 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7721 uint8_t fwk_mode = (uint8_t) *mode;
7722 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7723 }
7724
7725    /* Constant metadata values to be updated */
7726 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7727 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7728
7729 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7730 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7731
7732 int32_t hotPixelMap[2];
7733 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7734
7735 // CDS
7736 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7737 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7738 }
7739
Thierry Strudel04e026f2016-10-10 11:27:36 -07007740 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7741 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007742 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007743 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7744 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7745 } else {
7746 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7747 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007748
7749 if(fwk_hdr != curr_hdr_state) {
7750 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7751 if(fwk_hdr)
7752 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7753 else
7754 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7755 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007756 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7757 }
7758
Thierry Strudel54dc9782017-02-15 12:12:10 -08007759 //binning correction
7760 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7761 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7762 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7763 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7764 }
7765
Thierry Strudel04e026f2016-10-10 11:27:36 -07007766 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007767 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007768 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7769 int8_t is_ir_on = 0;
7770
7771 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7772 if(is_ir_on != curr_ir_state) {
7773 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7774 if(is_ir_on)
7775 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7776 else
7777 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7778 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007779 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007780 }
7781
Thierry Strudel269c81a2016-10-12 12:13:59 -07007782 // AEC SPEED
7783 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7784 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7785 }
7786
7787 // AWB SPEED
7788 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7789 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7790 }
7791
Thierry Strudel3d639192016-09-09 11:52:26 -07007792 // TNR
7793 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7794 uint8_t tnr_enable = tnr->denoise_enable;
7795 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007796 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7797 int8_t is_tnr_on = 0;
7798
7799 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7800 if(is_tnr_on != curr_tnr_state) {
7801 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7802 if(is_tnr_on)
7803 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7804 else
7805 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7806 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007807
7808 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7809 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7810 }
7811
7812 // Reprocess crop data
7813 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7814 uint8_t cnt = crop_data->num_of_streams;
7815 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7816            // mm-qcamera-daemon only posts crop_data for streams
7817            // not linked to pproc, so the absence of valid crop
7818            // metadata is not necessarily an error case.
7819 LOGD("No valid crop metadata entries");
7820 } else {
7821 uint32_t reproc_stream_id;
7822 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7823 LOGD("No reprocessible stream found, ignore crop data");
7824 } else {
7825 int rc = NO_ERROR;
7826 Vector<int32_t> roi_map;
7827 int32_t *crop = new int32_t[cnt*4];
7828 if (NULL == crop) {
7829 rc = NO_MEMORY;
7830 }
7831 if (NO_ERROR == rc) {
7832 int32_t streams_found = 0;
7833 for (size_t i = 0; i < cnt; i++) {
7834 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7835 if (pprocDone) {
7836 // HAL already does internal reprocessing,
7837 // either via reprocessing before JPEG encoding,
7838 // or offline postprocessing for pproc bypass case.
7839 crop[0] = 0;
7840 crop[1] = 0;
7841 crop[2] = mInputStreamInfo.dim.width;
7842 crop[3] = mInputStreamInfo.dim.height;
7843 } else {
7844 crop[0] = crop_data->crop_info[i].crop.left;
7845 crop[1] = crop_data->crop_info[i].crop.top;
7846 crop[2] = crop_data->crop_info[i].crop.width;
7847 crop[3] = crop_data->crop_info[i].crop.height;
7848 }
7849 roi_map.add(crop_data->crop_info[i].roi_map.left);
7850 roi_map.add(crop_data->crop_info[i].roi_map.top);
7851 roi_map.add(crop_data->crop_info[i].roi_map.width);
7852 roi_map.add(crop_data->crop_info[i].roi_map.height);
7853 streams_found++;
7854 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7855 crop[0], crop[1], crop[2], crop[3]);
7856 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7857 crop_data->crop_info[i].roi_map.left,
7858 crop_data->crop_info[i].roi_map.top,
7859 crop_data->crop_info[i].roi_map.width,
7860 crop_data->crop_info[i].roi_map.height);
7861 break;
7862
7863 }
7864 }
7865 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7866 &streams_found, 1);
7867 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7868 crop, (size_t)(streams_found * 4));
7869 if (roi_map.array()) {
7870 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7871 roi_map.array(), roi_map.size());
7872 }
7873 }
7874 if (crop) {
7875 delete [] crop;
7876 }
7877 }
7878 }
7879 }
7880
7881 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7882        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7883        // non-NULL, so hardcode the CAC result to OFF mode.
7884 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7885 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7886 } else {
7887 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7888 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7889 *cacMode);
7890 if (NAME_NOT_FOUND != val) {
7891 uint8_t resultCacMode = (uint8_t)val;
7892                // Check whether the CAC result from the callback matches the CAC mode
7893                // set by the framework. If not, report the CAC mode from the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007894 if (pendingRequest.fwkCacMode != resultCacMode) {
7895 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007896 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007897 //Check if CAC is disabled by property
7898 if (m_cacModeDisabled) {
7899 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7900 }
7901
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007902 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007903 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7904 } else {
7905 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7906 }
7907 }
7908 }
7909
7910 // Post blob of cam_cds_data through vendor tag.
7911 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7912 uint8_t cnt = cdsInfo->num_of_streams;
7913 cam_cds_data_t cdsDataOverride;
7914 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7915 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7916 cdsDataOverride.num_of_streams = 1;
7917 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7918 uint32_t reproc_stream_id;
7919 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7920 LOGD("No reprocessible stream found, ignore cds data");
7921 } else {
7922 for (size_t i = 0; i < cnt; i++) {
7923 if (cdsInfo->cds_info[i].stream_id ==
7924 reproc_stream_id) {
7925 cdsDataOverride.cds_info[0].cds_enable =
7926 cdsInfo->cds_info[i].cds_enable;
7927 break;
7928 }
7929 }
7930 }
7931 } else {
7932 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7933 }
7934 camMetadata.update(QCAMERA3_CDS_INFO,
7935 (uint8_t *)&cdsDataOverride,
7936 sizeof(cam_cds_data_t));
7937 }
7938
7939 // Ldaf calibration data
7940 if (!mLdafCalibExist) {
7941 IF_META_AVAILABLE(uint32_t, ldafCalib,
7942 CAM_INTF_META_LDAF_EXIF, metadata) {
7943 mLdafCalibExist = true;
7944 mLdafCalib[0] = ldafCalib[0];
7945 mLdafCalib[1] = ldafCalib[1];
7946 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7947 ldafCalib[0], ldafCalib[1]);
7948 }
7949 }
7950
Thierry Strudel54dc9782017-02-15 12:12:10 -08007951 // EXIF debug data through vendor tag
7952 /*
7953 * Mobicat Mask can assume 3 values:
7954 * 1 refers to Mobicat data,
7955 * 2 refers to Stats Debug and Exif Debug Data
7956 * 3 refers to Mobicat and Stats Debug Data
7957 * We want to make sure that we are sending Exif debug data
7958 * only when Mobicat Mask is 2.
7959 */
7960 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7961 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7962 (uint8_t *)(void *)mExifParams.debug_params,
7963 sizeof(mm_jpeg_debug_exif_params_t));
7964 }
7965
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007966 // Reprocess and DDM debug data through vendor tag
7967 cam_reprocess_info_t repro_info;
7968 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007969 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7970 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007971 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007972 }
7973 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7974 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007975 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007976 }
7977 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7978 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007979 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007980 }
7981 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7982 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007983 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007984 }
7985 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7986 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007987 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007988 }
7989 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007990 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007991 }
7992 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7993 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007994 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007995 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007996 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7997 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7998 }
7999 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8000 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8001 }
8002 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8003 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008004
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008005 // INSTANT AEC MODE
8006 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8007 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8008 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8009 }
8010
Shuzhen Wange763e802016-03-31 10:24:29 -07008011 // AF scene change
8012 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8013 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8014 }
8015
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008016 // Enable ZSL
8017 if (enableZsl != nullptr) {
8018 uint8_t value = *enableZsl ?
8019 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8020 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8021 }
8022
Xu Han821ea9c2017-05-23 09:00:40 -07008023 // OIS Data
8024 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8025 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8026 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8027 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8028 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8029 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8030 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8031 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8032 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8033 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8034 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8035 }
8036
Thierry Strudel3d639192016-09-09 11:52:26 -07008037 resultMetadata = camMetadata.release();
8038 return resultMetadata;
8039}
8040
8041/*===========================================================================
8042 * FUNCTION : saveExifParams
8043 *
8044 * DESCRIPTION:
8045 * DESCRIPTION: save 3A and stats EXIF debug parameters from the metadata callback into mExifParams
8046 * PARAMETERS :
8047 * @metadata : metadata information from callback
8048 *
8049 * RETURN : none
8050 *
8051 *==========================================================================*/
8052void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8053{
8054 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8055 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8056 if (mExifParams.debug_params) {
8057 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8058 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8059 }
8060 }
8061 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8062 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8063 if (mExifParams.debug_params) {
8064 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8065 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8066 }
8067 }
8068 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8069 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8070 if (mExifParams.debug_params) {
8071 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8072 mExifParams.debug_params->af_debug_params_valid = TRUE;
8073 }
8074 }
8075 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8076 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8077 if (mExifParams.debug_params) {
8078 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8079 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8080 }
8081 }
8082 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8083 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8084 if (mExifParams.debug_params) {
8085 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8086 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8087 }
8088 }
8089 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8090 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8091 if (mExifParams.debug_params) {
8092 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8093 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8094 }
8095 }
8096 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8097 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8098 if (mExifParams.debug_params) {
8099 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8100 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8101 }
8102 }
8103 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8104 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8105 if (mExifParams.debug_params) {
8106 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8107 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8108 }
8109 }
8110}
8111
8112/*===========================================================================
8113 * FUNCTION : get3AExifParams
8114 *
8115 * DESCRIPTION: return the cached 3A EXIF parameters (mExifParams)
8116 *
8117 * PARAMETERS : none
8118 *
8119 *
8120 * RETURN : mm_jpeg_exif_params_t
8121 *
8122 *==========================================================================*/
8123mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8124{
8125 return mExifParams;
8126}
8127
8128/*===========================================================================
8129 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8130 *
8131 * DESCRIPTION: translate urgent (partial result) metadata from the backend into framework result metadata
8132 *
8133 * PARAMETERS :
8134 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008135 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8136 * urgent metadata in a batch. Always true for
8137 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008138 * @frame_number : frame number for this urgent metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07008139 *
8140 * RETURN : camera_metadata_t*
8141 * metadata in a format specified by fwk
8142 *==========================================================================*/
8143camera_metadata_t*
8144QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008145 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
8146 uint32_t frame_number)
Thierry Strudel3d639192016-09-09 11:52:26 -07008147{
8148 CameraMetadata camMetadata;
8149 camera_metadata_t *resultMetadata;
8150
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008151 if (!lastUrgentMetadataInBatch) {
8152 /* In batch mode, use empty metadata if this is not the last in batch
8153 */
8154 resultMetadata = allocate_camera_metadata(0, 0);
8155 return resultMetadata;
8156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008157
8158 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8159 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8160 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8161 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8162 }
8163
8164 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8165 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8166 &aecTrigger->trigger, 1);
8167 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8168 &aecTrigger->trigger_id, 1);
8169 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8170 aecTrigger->trigger);
8171 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8172 aecTrigger->trigger_id);
8173 }
8174
8175 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8176 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8177 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8178 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8179 }
8180
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008181 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8182 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8183 if (NAME_NOT_FOUND != val) {
8184 uint8_t fwkAfMode = (uint8_t)val;
8185 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8186 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8187 } else {
8188 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8189 val);
8190 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008191 }
8192
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008193 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8194 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8195 af_trigger->trigger);
8196 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8197 af_trigger->trigger_id);
8198
8199 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8200 mAfTrigger = *af_trigger;
8201 uint32_t fwk_AfState = (uint32_t) *afState;
8202
8203 // If this is the result for a new trigger, check if there is new early
8204 // af state. If there is, use the last af state for all results
8205 // preceding current partial frame number.
8206 for (auto & pendingRequest : mPendingRequestsList) {
8207 if (pendingRequest.frame_number < frame_number) {
8208 pendingRequest.focusStateValid = true;
8209 pendingRequest.focusState = fwk_AfState;
8210 } else if (pendingRequest.frame_number == frame_number) {
8211 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8212 // Check if early AF state for trigger exists. If yes, send AF state as
8213 // partial result for better latency.
8214 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8215 pendingRequest.focusStateSent = true;
8216 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8217 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8218 frame_number, fwkEarlyAfState);
8219 }
8220 }
8221 }
8222 }
8223 }
8224 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8225 &mAfTrigger.trigger, 1);
8226 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8227
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008228 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8229 /*af regions*/
8230 int32_t afRegions[REGIONS_TUPLE_COUNT];
8231        // Map the AF region from the sensor output coordinate system to the
8232        // active array coordinate system.
8233 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8234 hAfRegions->rect.width, hAfRegions->rect.height);
8235
8236 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8237 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8238 REGIONS_TUPLE_COUNT);
8239 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8240 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8241 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8242 hAfRegions->rect.height);
8243 }
8244
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008245 // AF region confidence
8246 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8247 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8248 }
8249
Thierry Strudel3d639192016-09-09 11:52:26 -07008250 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8251 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8252 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8253 if (NAME_NOT_FOUND != val) {
8254 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8255 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8256 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8257 } else {
8258 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8259 }
8260 }
8261
8262 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8263 uint32_t aeMode = CAM_AE_MODE_MAX;
8264 int32_t flashMode = CAM_FLASH_MODE_MAX;
8265 int32_t redeye = -1;
8266 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8267 aeMode = *pAeMode;
8268 }
8269 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8270 flashMode = *pFlashMode;
8271 }
8272 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8273 redeye = *pRedeye;
8274 }
8275
8276 if (1 == redeye) {
8277 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8278 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8279 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8280 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8281 flashMode);
8282 if (NAME_NOT_FOUND != val) {
8283 fwk_aeMode = (uint8_t)val;
8284 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8285 } else {
8286 LOGE("Unsupported flash mode %d", flashMode);
8287 }
8288 } else if (aeMode == CAM_AE_MODE_ON) {
8289 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8290 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8291 } else if (aeMode == CAM_AE_MODE_OFF) {
8292 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8293 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008294 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8295 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8296 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008297 } else {
8298 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8299 "flashMode:%d, aeMode:%u!!!",
8300 redeye, flashMode, aeMode);
8301 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008302 if (mInstantAEC) {
8303        // Increment the frame index count until a bound is reached for instant AEC.
8304 mInstantAecFrameIdxCount++;
8305 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8306 CAM_INTF_META_AEC_INFO, metadata) {
8307 LOGH("ae_params->settled = %d",ae_params->settled);
8308 // If AEC settled, or if number of frames reached bound value,
8309 // should reset instant AEC.
8310 if (ae_params->settled ||
8311 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8312 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8313 mInstantAEC = false;
8314 mResetInstantAEC = true;
8315 mInstantAecFrameIdxCount = 0;
8316 }
8317 }
8318 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008319 resultMetadata = camMetadata.release();
8320 return resultMetadata;
8321}
8322
8323/*===========================================================================
8324 * FUNCTION : dumpMetadataToFile
8325 *
8326 * DESCRIPTION: Dumps tuning metadata to file system
8327 *
8328 * PARAMETERS :
8329 * @meta : tuning metadata
8330 * @dumpFrameCount : current dump frame count
8331 * @enabled : Enable mask
8332 *
8333 *==========================================================================*/
8334void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8335 uint32_t &dumpFrameCount,
8336 bool enabled,
8337 const char *type,
8338 uint32_t frameNumber)
8339{
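    // On-disk layout, as written below: a uint32_t TUNING_DATA_VERSION header,
    // five uint32_t size fields (sensor, VFE, CPP, CAC, mod3), then the raw
    // sensor/VFE/CPP/CAC data sections. Files are created under
    // QCAMERA_DUMP_FRM_LOCATION as "<timestamp><dumpFrameCount>m_<type>_<frameNumber>.bin".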
8340 //Some sanity checks
8341 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8342 LOGE("Tuning sensor data size bigger than expected %d: %d",
8343 meta.tuning_sensor_data_size,
8344 TUNING_SENSOR_DATA_MAX);
8345 return;
8346 }
8347
8348 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8349 LOGE("Tuning VFE data size bigger than expected %d: %d",
8350 meta.tuning_vfe_data_size,
8351 TUNING_VFE_DATA_MAX);
8352 return;
8353 }
8354
8355 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8356 LOGE("Tuning CPP data size bigger than expected %d: %d",
8357 meta.tuning_cpp_data_size,
8358 TUNING_CPP_DATA_MAX);
8359 return;
8360 }
8361
8362 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8363 LOGE("Tuning CAC data size bigger than expected %d: %d",
8364 meta.tuning_cac_data_size,
8365 TUNING_CAC_DATA_MAX);
8366 return;
8367 }
8368 //
8369
8370 if(enabled){
8371 char timeBuf[FILENAME_MAX];
8372 char buf[FILENAME_MAX];
8373 memset(buf, 0, sizeof(buf));
8374 memset(timeBuf, 0, sizeof(timeBuf));
8375 time_t current_time;
8376 struct tm * timeinfo;
8377 time (&current_time);
8378 timeinfo = localtime (&current_time);
8379 if (timeinfo != NULL) {
8380 strftime (timeBuf, sizeof(timeBuf),
8381 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8382 }
8383 String8 filePath(timeBuf);
8384 snprintf(buf,
8385 sizeof(buf),
8386 "%dm_%s_%d.bin",
8387 dumpFrameCount,
8388 type,
8389 frameNumber);
8390 filePath.append(buf);
8391 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8392 if (file_fd >= 0) {
8393 ssize_t written_len = 0;
8394 meta.tuning_data_version = TUNING_DATA_VERSION;
8395 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8396 written_len += write(file_fd, data, sizeof(uint32_t));
8397 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8398 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8399 written_len += write(file_fd, data, sizeof(uint32_t));
8400 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8401 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8402 written_len += write(file_fd, data, sizeof(uint32_t));
8403 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8404 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8405 written_len += write(file_fd, data, sizeof(uint32_t));
8406 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8407 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8408 written_len += write(file_fd, data, sizeof(uint32_t));
8409 meta.tuning_mod3_data_size = 0;
8410 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8411 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8412 written_len += write(file_fd, data, sizeof(uint32_t));
8413 size_t total_size = meta.tuning_sensor_data_size;
8414 data = (void *)((uint8_t *)&meta.data);
8415 written_len += write(file_fd, data, total_size);
8416 total_size = meta.tuning_vfe_data_size;
8417 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8418 written_len += write(file_fd, data, total_size);
8419 total_size = meta.tuning_cpp_data_size;
8420 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8421 written_len += write(file_fd, data, total_size);
8422 total_size = meta.tuning_cac_data_size;
8423 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8424 written_len += write(file_fd, data, total_size);
8425 close(file_fd);
8426 }else {
8427            LOGE("failed to open file for metadata dumping");
8428 }
8429 }
8430}
8431
8432/*===========================================================================
8433 * FUNCTION : cleanAndSortStreamInfo
8434 *
8435 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8436 * and sort them such that raw stream is at the end of the list
8437 * This is a workaround for camera daemon constraint.
8438 *              This is a workaround for a camera daemon constraint.
8439 * PARAMETERS : None
8440 *
8441 *==========================================================================*/
8442void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8443{
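    // Illustrative ordering (hypothetical stream set): an input list of
    // [RAW16, PREVIEW, SNAPSHOT] with all entries valid comes out as
    // [PREVIEW, SNAPSHOT, RAW16] -- non-raw streams first, raw streams at the end.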
8444 List<stream_info_t *> newStreamInfo;
8445
8446 /*clean up invalid streams*/
8447 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8448 it != mStreamInfo.end();) {
8449 if(((*it)->status) == INVALID){
8450 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8451 delete channel;
8452 free(*it);
8453 it = mStreamInfo.erase(it);
8454 } else {
8455 it++;
8456 }
8457 }
8458
8459 // Move preview/video/callback/snapshot streams into newList
8460 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8461 it != mStreamInfo.end();) {
8462 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8463 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8464 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8465 newStreamInfo.push_back(*it);
8466 it = mStreamInfo.erase(it);
8467 } else
8468 it++;
8469 }
8470 // Move raw streams into newList
8471 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8472 it != mStreamInfo.end();) {
8473 newStreamInfo.push_back(*it);
8474 it = mStreamInfo.erase(it);
8475 }
8476
8477 mStreamInfo = newStreamInfo;
8478}
8479
8480/*===========================================================================
8481 * FUNCTION : extractJpegMetadata
8482 *
8483 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8484 * JPEG metadata is cached in HAL, and return as part of capture
8485 * result when metadata is returned from camera daemon.
8486 *
8487 * PARAMETERS : @jpegMetadata: destination for the extracted jpeg metadata
8488 * @request: capture request
8489 *
8490 *==========================================================================*/
8491void QCamera3HardwareInterface::extractJpegMetadata(
8492 CameraMetadata& jpegMetadata,
8493 const camera3_capture_request_t *request)
8494{
8495 CameraMetadata frame_settings;
8496 frame_settings = request->settings;
8497
8498 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8499 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8500 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8501 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8502
8503 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8504 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8505 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8506 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8507
8508 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8509 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8510 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8511 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8512
8513 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8514 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8515 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8516 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8517
8518 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8519 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8520 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8521 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8522
8523 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8524 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8525 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8526 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8527
8528 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8529 int32_t thumbnail_size[2];
8530 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8531 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8532 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8533 int32_t orientation =
8534 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008535 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008536 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8537 int32_t temp;
8538 temp = thumbnail_size[0];
8539 thumbnail_size[0] = thumbnail_size[1];
8540 thumbnail_size[1] = temp;
8541 }
8542 }
8543 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8544 thumbnail_size,
8545 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8546 }
8547
8548}
8549
8550/*===========================================================================
8551 * FUNCTION : convertToRegions
8552 *
8553 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8554 *
8555 * PARAMETERS :
8556 * @rect : cam_rect_t struct to convert
8557 * @region : int32_t destination array
8558 * @weight : if we are converting from cam_area_t, weight is valid
8559 * else weight = -1
8560 *
8561 *==========================================================================*/
8562void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8563 int32_t *region, int weight)
8564{
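    // Example: rect = {left=100, top=200, width=300, height=400} with weight=1
    // produces region[] = {100, 200, 400, 600, 1} (left, top, right, bottom, weight).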
Jason Lee8ce36fa2017-04-19 19:40:37 -07008565 region[FACE_LEFT] = rect.left;
8566 region[FACE_TOP] = rect.top;
8567 region[FACE_RIGHT] = rect.left + rect.width;
8568 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008569 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008570 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008571 }
8572}
8573
8574/*===========================================================================
8575 * FUNCTION : convertFromRegions
8576 *
8577 * DESCRIPTION: helper method to convert from array to cam_rect_t
8578 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8579 *
8580 * PARAMETERS :
8581 *   @roi : cam_area_t destination struct
8582 *   @frame_settings : capture request settings containing the region tag
8583 *   @tag : metadata tag whose data is the region array
8584 *          [x_min, y_min, x_max, y_max, weight]
8585 *==========================================================================*/
8586void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008587 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008588{
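    // The framework region for 'tag' is a 5-element int32_t array
    // [x_min, y_min, x_max, y_max, weight]; it is converted back into a
    // left/top/width/height cam_area_t here.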
Thierry Strudel3d639192016-09-09 11:52:26 -07008589 int32_t x_min = frame_settings.find(tag).data.i32[0];
8590 int32_t y_min = frame_settings.find(tag).data.i32[1];
8591 int32_t x_max = frame_settings.find(tag).data.i32[2];
8592 int32_t y_max = frame_settings.find(tag).data.i32[3];
8593 roi.weight = frame_settings.find(tag).data.i32[4];
8594 roi.rect.left = x_min;
8595 roi.rect.top = y_min;
8596 roi.rect.width = x_max - x_min;
8597 roi.rect.height = y_max - y_min;
8598}
8599
8600/*===========================================================================
8601 * FUNCTION : resetIfNeededROI
8602 *
8603 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8604 * crop region
8605 *
8606 * PARAMETERS :
8607 * @roi : cam_area_t struct to resize
8608 * @scalerCropRegion : cam_crop_region_t region to compare against
8609 *
8610 *
8611 *==========================================================================*/
8612bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8613 const cam_crop_region_t* scalerCropRegion)
8614{
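    // Worked example (hypothetical values): roi = (0, 0, 4000x3000) against a
    // scaler crop region of (500, 375, 3000x2250) is clamped to
    // (500, 375, 3000x2250); an roi entirely outside the crop region returns
    // false, and weight == 0 returns true without modification.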
8615 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8616 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8617 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8618 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8619
8620    /* According to the spec, weight = 0 indicates the roi should be disabled.
8621     * Without this check, the validation below (whether the roi lies inside the
8622     * scaler crop region) would fail, the roi would not be reset, and the
8623     * algorithm would continue to use a stale roi window.
8624     */
8625 if (roi->weight == 0) {
8626 return true;
8627 }
8628
8629 if ((roi_x_max < scalerCropRegion->left) ||
8630 // right edge of roi window is left of scalar crop's left edge
8631 (roi_y_max < scalerCropRegion->top) ||
8632 // bottom edge of roi window is above scalar crop's top edge
8633 (roi->rect.left > crop_x_max) ||
8634        // left edge of roi window is to the right of scalar crop's right edge
8635 (roi->rect.top > crop_y_max)){
8636        // top edge of roi window is below scalar crop's bottom edge
8637 return false;
8638 }
8639 if (roi->rect.left < scalerCropRegion->left) {
8640 roi->rect.left = scalerCropRegion->left;
8641 }
8642 if (roi->rect.top < scalerCropRegion->top) {
8643 roi->rect.top = scalerCropRegion->top;
8644 }
8645 if (roi_x_max > crop_x_max) {
8646 roi_x_max = crop_x_max;
8647 }
8648 if (roi_y_max > crop_y_max) {
8649 roi_y_max = crop_y_max;
8650 }
8651 roi->rect.width = roi_x_max - roi->rect.left;
8652 roi->rect.height = roi_y_max - roi->rect.top;
8653 return true;
8654}
8655
8656/*===========================================================================
8657 * FUNCTION : convertLandmarks
8658 *
8659 * DESCRIPTION: helper method to extract the landmarks from face detection info
8660 *
8661 * PARAMETERS :
8662 * @landmark_data : input landmark data to be converted
8663 * @landmarks : int32_t destination array
8664 *
8665 *
8666 *==========================================================================*/
8667void QCamera3HardwareInterface::convertLandmarks(
8668 cam_face_landmarks_info_t landmark_data,
8669 int32_t *landmarks)
8670{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008671 if (landmark_data.is_left_eye_valid) {
8672 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8673 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8674 } else {
8675 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8676 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8677 }
8678
8679 if (landmark_data.is_right_eye_valid) {
8680 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8681 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8682 } else {
8683 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8684 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8685 }
8686
8687 if (landmark_data.is_mouth_valid) {
8688 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8689 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8690 } else {
8691 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8692 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8693 }
8694}
8695
8696/*===========================================================================
8697 * FUNCTION : setInvalidLandmarks
8698 *
8699 * DESCRIPTION: helper method to set invalid landmarks
8700 *
8701 * PARAMETERS :
8702 * @landmarks : int32_t destination array
8703 *
8704 *
8705 *==========================================================================*/
8706void QCamera3HardwareInterface::setInvalidLandmarks(
8707 int32_t *landmarks)
8708{
8709 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8710 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8711 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8712 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8713 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8714 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008715}
8716
8717#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008718
8719/*===========================================================================
8720 * FUNCTION : getCapabilities
8721 *
8722 * DESCRIPTION: query camera capability from back-end
8723 *
8724 * PARAMETERS :
8725 * @ops : mm-interface ops structure
8726 * @cam_handle : camera handle for which we need capability
8727 *
8728 * RETURN : ptr type of capability structure
8729 * capability for success
8730 * NULL for failure
8731 *==========================================================================*/
8732cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8733 uint32_t cam_handle)
8734{
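    // Flow: allocate a one-block heap buffer, map it to the backend as the
    // capability buffer, call query_capability() so the backend fills it in,
    // copy the result into a malloc'd cam_capability_t for the caller, then
    // unmap and release the heap via the cleanup labels below.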
8735 int rc = NO_ERROR;
8736 QCamera3HeapMemory *capabilityHeap = NULL;
8737 cam_capability_t *cap_ptr = NULL;
8738
8739 if (ops == NULL) {
8740 LOGE("Invalid arguments");
8741 return NULL;
8742 }
8743
8744 capabilityHeap = new QCamera3HeapMemory(1);
8745 if (capabilityHeap == NULL) {
8746 LOGE("creation of capabilityHeap failed");
8747 return NULL;
8748 }
8749
8750 /* Allocate memory for capability buffer */
8751 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8752 if(rc != OK) {
8753        LOGE("No memory for capability");
8754 goto allocate_failed;
8755 }
8756
8757 /* Map memory for capability buffer */
8758 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8759
8760 rc = ops->map_buf(cam_handle,
8761 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8762 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8763 if(rc < 0) {
8764 LOGE("failed to map capability buffer");
8765 rc = FAILED_TRANSACTION;
8766 goto map_failed;
8767 }
8768
8769 /* Query Capability */
8770 rc = ops->query_capability(cam_handle);
8771 if(rc < 0) {
8772 LOGE("failed to query capability");
8773 rc = FAILED_TRANSACTION;
8774 goto query_failed;
8775 }
8776
8777 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8778 if (cap_ptr == NULL) {
8779 LOGE("out of memory");
8780 rc = NO_MEMORY;
8781 goto query_failed;
8782 }
8783
8784 memset(cap_ptr, 0, sizeof(cam_capability_t));
8785 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8786
8787 int index;
8788 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8789 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8790 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8791 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8792 }
8793
8794query_failed:
8795 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8796map_failed:
8797 capabilityHeap->deallocate();
8798allocate_failed:
8799 delete capabilityHeap;
8800
8801 if (rc != NO_ERROR) {
8802 return NULL;
8803 } else {
8804 return cap_ptr;
8805 }
8806}
8807
Thierry Strudel3d639192016-09-09 11:52:26 -07008808/*===========================================================================
8809 * FUNCTION : initCapabilities
8810 *
8811 * DESCRIPTION: initialize camera capabilities in static data struct
8812 *
8813 * PARAMETERS :
8814 * @cameraId : camera Id
8815 *
8816 * RETURN : int32_t type of status
8817 * NO_ERROR -- success
8818 * none-zero failure code
8819 *==========================================================================*/
8820int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8821{
8822 int rc = 0;
8823 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008824 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008825
8826 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8827 if (rc) {
8828 LOGE("camera_open failed. rc = %d", rc);
8829 goto open_failed;
8830 }
8831 if (!cameraHandle) {
8832 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8833 goto open_failed;
8834 }
8835
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008836 handle = get_main_camera_handle(cameraHandle->camera_handle);
8837 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8838 if (gCamCapability[cameraId] == NULL) {
8839 rc = FAILED_TRANSACTION;
8840 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008841 }
8842
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008843 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008844 if (is_dual_camera_by_idx(cameraId)) {
8845 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8846 gCamCapability[cameraId]->aux_cam_cap =
8847 getCapabilities(cameraHandle->ops, handle);
8848 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8849 rc = FAILED_TRANSACTION;
8850 free(gCamCapability[cameraId]);
8851 goto failed_op;
8852 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008853
8854 // Copy the main camera capability to main_cam_cap struct
8855 gCamCapability[cameraId]->main_cam_cap =
8856 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8857 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8858 LOGE("out of memory");
8859 rc = NO_MEMORY;
8860 goto failed_op;
8861 }
8862 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8863 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008864 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008865failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008866 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8867 cameraHandle = NULL;
8868open_failed:
8869 return rc;
8870}
8871
8872/*==========================================================================
8873 * FUNCTION   : get3AVersion
8874 *
8875 * DESCRIPTION: get the Q3A S/W version
8876 *
8877 * PARAMETERS :
8878 * @sw_version: Reference of Q3A structure which will hold version info upon
8879 * return
8880 *
8881 * RETURN : None
8882 *
8883 *==========================================================================*/
8884void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8885{
8886 if(gCamCapability[mCameraId])
8887 sw_version = gCamCapability[mCameraId]->q3a_version;
8888 else
8889 LOGE("Capability structure NULL!");
8890}
8891
8892
8893/*===========================================================================
8894 * FUNCTION : initParameters
8895 *
8896 * DESCRIPTION: initialize camera parameters
8897 *
8898 * PARAMETERS :
8899 *
8900 * RETURN : int32_t type of status
8901 * NO_ERROR -- success
8902 * none-zero failure code
8903 *==========================================================================*/
8904int QCamera3HardwareInterface::initParameters()
8905{
8906 int rc = 0;
8907
8908 //Allocate Set Param Buffer
8909 mParamHeap = new QCamera3HeapMemory(1);
8910 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8911 if(rc != OK) {
8912 rc = NO_MEMORY;
8913 LOGE("Failed to allocate SETPARM Heap memory");
8914 delete mParamHeap;
8915 mParamHeap = NULL;
8916 return rc;
8917 }
8918
8919 //Map memory for parameters buffer
8920 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8921 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8922 mParamHeap->getFd(0),
8923 sizeof(metadata_buffer_t),
8924 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8925 if(rc < 0) {
8926 LOGE("failed to map SETPARM buffer");
8927 rc = FAILED_TRANSACTION;
8928 mParamHeap->deallocate();
8929 delete mParamHeap;
8930 mParamHeap = NULL;
8931 return rc;
8932 }
8933
8934 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8935
8936 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8937 return rc;
8938}
8939
8940/*===========================================================================
8941 * FUNCTION : deinitParameters
8942 *
8943 * DESCRIPTION: de-initialize camera parameters
8944 *
8945 * PARAMETERS :
8946 *
8947 * RETURN : NONE
8948 *==========================================================================*/
8949void QCamera3HardwareInterface::deinitParameters()
8950{
8951 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8952 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8953
8954 mParamHeap->deallocate();
8955 delete mParamHeap;
8956 mParamHeap = NULL;
8957
8958 mParameters = NULL;
8959
8960 free(mPrevParameters);
8961 mPrevParameters = NULL;
8962}
8963
8964/*===========================================================================
8965 * FUNCTION : calcMaxJpegSize
8966 *
8967 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8968 *
8969 * PARAMETERS :
8970 *   @camera_id : camera Id
8971 * RETURN : max_jpeg_size
8972 *==========================================================================*/
8973size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8974{
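    // Rough sizing example (hypothetical 4000x3000 max picture size):
    // 12,000,000 pixels * 3/2 = 18,000,000 bytes as the worst-case JPEG size,
    // plus sizeof(camera3_jpeg_blob_t) for the blob header carried in the buffer.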
8975 size_t max_jpeg_size = 0;
8976 size_t temp_width, temp_height;
8977 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8978 MAX_SIZES_CNT);
8979 for (size_t i = 0; i < count; i++) {
8980 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8981 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8982 if (temp_width * temp_height > max_jpeg_size ) {
8983 max_jpeg_size = temp_width * temp_height;
8984 }
8985 }
8986 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8987 return max_jpeg_size;
8988}
8989
8990/*===========================================================================
8991 * FUNCTION : getMaxRawSize
8992 *
8993 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8994 *
8995 * PARAMETERS :
8996 *   @camera_id : camera Id
8997 * RETURN : Largest supported Raw Dimension
8998 *==========================================================================*/
8999cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9000{
9001 int max_width = 0;
9002 cam_dimension_t maxRawSize;
9003
9004 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9005 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9006 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9007 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9008 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9009 }
9010 }
9011 return maxRawSize;
9012}
9013
9014
9015/*===========================================================================
9016 * FUNCTION : calcMaxJpegDim
9017 *
9018 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9019 *
9020 * PARAMETERS :
9021 *
9022 * RETURN : max_jpeg_dim
9023 *==========================================================================*/
9024cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9025{
9026 cam_dimension_t max_jpeg_dim;
9027 cam_dimension_t curr_jpeg_dim;
9028 max_jpeg_dim.width = 0;
9029 max_jpeg_dim.height = 0;
9030 curr_jpeg_dim.width = 0;
9031 curr_jpeg_dim.height = 0;
9032 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9033 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9034 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9035 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9036 max_jpeg_dim.width * max_jpeg_dim.height ) {
9037 max_jpeg_dim.width = curr_jpeg_dim.width;
9038 max_jpeg_dim.height = curr_jpeg_dim.height;
9039 }
9040 }
9041 return max_jpeg_dim;
9042}
9043
9044/*===========================================================================
9045 * FUNCTION : addStreamConfig
9046 *
9047 * DESCRIPTION: adds the stream configuration to the array
9048 *
9049 * PARAMETERS :
9050 * @available_stream_configs : pointer to stream configuration array
9051 * @scalar_format : scalar format
9052 * @dim : configuration dimension
9053 * @config_type : input or output configuration type
9054 *
9055 * RETURN : NONE
9056 *==========================================================================*/
9057void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9058 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9059{
9060 available_stream_configs.add(scalar_format);
9061 available_stream_configs.add(dim.width);
9062 available_stream_configs.add(dim.height);
9063 available_stream_configs.add(config_type);
9064}
9065
9066/*===========================================================================
9067 * FUNCTION : suppportBurstCapture
9068 * FUNCTION   : supportBurstCapture
9069 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9070 *
9071 * PARAMETERS :
9072 * @cameraId : camera Id
9073 *
9074 * RETURN : true if camera supports BURST_CAPTURE
9075 * false otherwise
9076 *==========================================================================*/
9077bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9078{
9079 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9080 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9081 const int32_t highResWidth = 3264;
9082 const int32_t highResHeight = 2448;
9083
9084 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9085 // Maximum resolution images cannot be captured at >= 10fps
9086 // -> not supporting BURST_CAPTURE
9087 return false;
9088 }
9089
9090 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9091 // Maximum resolution images can be captured at >= 20fps
9092 // --> supporting BURST_CAPTURE
9093 return true;
9094 }
9095
9096 // Find the smallest highRes resolution, or largest resolution if there is none
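    // Note: this walk assumes picture_sizes_tbl is sorted from largest to smallest resolution.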
9097 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9098 MAX_SIZES_CNT);
9099 size_t highRes = 0;
9100 while ((highRes + 1 < totalCnt) &&
9101 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9102 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9103 highResWidth * highResHeight)) {
9104 highRes++;
9105 }
9106 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9107 return true;
9108 } else {
9109 return false;
9110 }
9111}
9112
9113/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009114 * FUNCTION : getPDStatIndex
9115 *
9116 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9117 *
9118 * PARAMETERS :
9119 * @caps : camera capabilities
9120 *
9121 * RETURN : int32_t type
9122 * non-negative - on success
9123 * -1 - on failure
9124 *==========================================================================*/
9125int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9126 if (nullptr == caps) {
9127 return -1;
9128 }
9129
9130 uint32_t metaRawCount = caps->meta_raw_channel_count;
9131 int32_t ret = -1;
9132 for (size_t i = 0; i < metaRawCount; i++) {
9133 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9134 ret = i;
9135 break;
9136 }
9137 }
9138
9139 return ret;
9140}
9141
9142/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009143 * FUNCTION : initStaticMetadata
9144 *
9145 * DESCRIPTION: initialize the static metadata
9146 *
9147 * PARAMETERS :
9148 * @cameraId : camera Id
9149 *
9150 * RETURN : int32_t type of status
9151 * 0 -- success
9152 * non-zero failure code
9153 *==========================================================================*/
9154int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9155{
9156 int rc = 0;
9157 CameraMetadata staticInfo;
9158 size_t count = 0;
9159 bool limitedDevice = false;
9160 char prop[PROPERTY_VALUE_MAX];
9161 bool supportBurst = false;
9162
9163 supportBurst = supportBurstCapture(cameraId);
9164
9165 /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9166 * guaranteed, or if the min fps of the max resolution is less than 20 fps, the
9167 * device is advertised as a LIMITED device */
9168 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9169 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9170 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9171 !supportBurst;
9172
9173 uint8_t supportedHwLvl = limitedDevice ?
9174 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009175#ifndef USE_HAL_3_3
9176 // LEVEL_3 - This device will support level 3.
9177 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9178#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009179 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009180#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009181
9182 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9183 &supportedHwLvl, 1);
9184
9185 bool facingBack = false;
9186 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9187 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9188 facingBack = true;
9189 }
9190 /*HAL 3 only*/
9191 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9192 &gCamCapability[cameraId]->min_focus_distance, 1);
9193
9194 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9195 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9196
9197 /* Should be using focal lengths, but the sensor doesn't provide that info yet */
9198 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9199 &gCamCapability[cameraId]->focal_length,
9200 1);
9201
9202 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9203 gCamCapability[cameraId]->apertures,
9204 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9205
9206 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9207 gCamCapability[cameraId]->filter_densities,
9208 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9209
9210
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009211 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9212 size_t mode_count =
9213 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9214 for (size_t i = 0; i < mode_count; i++) {
9215 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9216 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009217 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009218 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009219
9220 int32_t lens_shading_map_size[] = {
9221 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9222 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9223 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9224 lens_shading_map_size,
9225 sizeof(lens_shading_map_size)/sizeof(int32_t));
9226
9227 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9228 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9229
9230 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9231 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9232
9233 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9234 &gCamCapability[cameraId]->max_frame_duration, 1);
9235
9236 camera_metadata_rational baseGainFactor = {
9237 gCamCapability[cameraId]->base_gain_factor.numerator,
9238 gCamCapability[cameraId]->base_gain_factor.denominator};
9239 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9240 &baseGainFactor, 1);
9241
9242 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9243 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9244
9245 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9246 gCamCapability[cameraId]->pixel_array_size.height};
9247 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9248 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9249
9250 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9251 gCamCapability[cameraId]->active_array_size.top,
9252 gCamCapability[cameraId]->active_array_size.width,
9253 gCamCapability[cameraId]->active_array_size.height};
9254 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9255 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9256
9257 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9258 &gCamCapability[cameraId]->white_level, 1);
9259
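    // Adjust the per-channel black levels to the sensor's color filter arrangement before
    // advertising ANDROID_SENSOR_BLACK_LEVEL_PATTERN.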
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009260 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9261 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9262 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009263 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009264 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009265
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009266#ifndef USE_HAL_3_3
9267 bool hasBlackRegions = false;
9268 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9269 LOGW("black_region_count: %d is bounded to %d",
9270 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9271 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9272 }
9273 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9274 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9275 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9276 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9277 }
9278 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9279 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9280 hasBlackRegions = true;
9281 }
9282#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009283 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9284 &gCamCapability[cameraId]->flash_charge_duration, 1);
9285
9286 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9287 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9288
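    // Advertise the REALTIME timestamp source only when sensor timestamps are calibrated;
    // otherwise report UNKNOWN.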
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009289 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9290 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9291 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009292 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9293 &timestampSource, 1);
9294
Thierry Strudel54dc9782017-02-15 12:12:10 -08009295 //update histogram vendor data
9296 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009297 &gCamCapability[cameraId]->histogram_size, 1);
9298
Thierry Strudel54dc9782017-02-15 12:12:10 -08009299 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009300 &gCamCapability[cameraId]->max_histogram_count, 1);
9301
Shuzhen Wang14415f52016-11-16 18:26:18 -08009302 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9303 //so that the app can request fewer bins than the maximum supported.
9304 std::vector<int32_t> histBins;
9305 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9306 histBins.push_back(maxHistBins);
9307 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9308 (maxHistBins & 0x1) == 0) {
9309 histBins.push_back(maxHistBins >> 1);
9310 maxHistBins >>= 1;
9311 }
9312 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9313 histBins.data(), histBins.size());
9314
Thierry Strudel3d639192016-09-09 11:52:26 -07009315 int32_t sharpness_map_size[] = {
9316 gCamCapability[cameraId]->sharpness_map_size.width,
9317 gCamCapability[cameraId]->sharpness_map_size.height};
9318
9319 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9320 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9321
9322 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9323 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9324
Emilian Peev0f3c3162017-03-15 12:57:46 +00009325 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9326 if (0 <= indexPD) {
9327 // Advertise PD stats data as part of the Depth capabilities
9328 int32_t depthWidth =
9329 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9330 int32_t depthHeight =
9331 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009332 int32_t depthStride =
9333 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009334 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9335 assert(0 < depthSamplesCount);
9336 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9337 &depthSamplesCount, 1);
9338
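        // Depth stream configuration entries follow the same (format, width, height, direction)
        // layout used for the regular scaler stream configurations.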
9339 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9340 depthHeight,
9341 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9342 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9343 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9344 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9345 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9346
9347 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9348 depthHeight, 33333333,
9349 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9350 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9351 depthMinDuration,
9352 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9353
9354 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9355 depthHeight, 0,
9356 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9357 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9358 depthStallDuration,
9359 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9360
9361 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9362 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009363
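        // PD data dimensions vendor tag: width, height and stride in bytes (two bytes per
        // 16-bit sample).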
9364 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9365 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9366 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009367 }
9368
Thierry Strudel3d639192016-09-09 11:52:26 -07009369 int32_t scalar_formats[] = {
9370 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9371 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9372 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9373 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9374 HAL_PIXEL_FORMAT_RAW10,
9375 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009376 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9377 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9378 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009379
9380 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9381 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9382 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9383 count, MAX_SIZES_CNT, available_processed_sizes);
9384 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9385 available_processed_sizes, count * 2);
9386
9387 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9388 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9389 makeTable(gCamCapability[cameraId]->raw_dim,
9390 count, MAX_SIZES_CNT, available_raw_sizes);
9391 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9392 available_raw_sizes, count * 2);
9393
9394 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9395 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9396 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9397 count, MAX_SIZES_CNT, available_fps_ranges);
9398 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9399 available_fps_ranges, count * 2);
9400
9401 camera_metadata_rational exposureCompensationStep = {
9402 gCamCapability[cameraId]->exp_compensation_step.numerator,
9403 gCamCapability[cameraId]->exp_compensation_step.denominator};
9404 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9405 &exposureCompensationStep, 1);
9406
9407 Vector<uint8_t> availableVstabModes;
9408 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9409 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009410 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009411 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009412 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009413 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009414 count = IS_TYPE_MAX;
9415 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9416 for (size_t i = 0; i < count; i++) {
9417 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9418 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9419 eisSupported = true;
9420 break;
9421 }
9422 }
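    // Advertise video stabilization ON only for the back camera when the sensor supports
    // EIS 2.0/3.0 and persist.camera.eis.enable is set.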
9423 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009424 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9425 }
9426 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9427 availableVstabModes.array(), availableVstabModes.size());
9428
9429 /*HAL 1 and HAL 3 common*/
9430 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9431 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9432 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009433 // Cap the max zoom to the max preferred value
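    // Note: the integer division below truncates the ratio to a whole zoom factor before it is
    // capped and stored as a float.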
9434 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009435 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9436 &maxZoom, 1);
9437
9438 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9439 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9440
9441 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9442 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9443 max3aRegions[2] = 0; /* AF not supported */
9444 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9445 max3aRegions, 3);
9446
9447 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9448 memset(prop, 0, sizeof(prop));
9449 property_get("persist.camera.facedetect", prop, "1");
9450 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9451 LOGD("Support face detection mode: %d",
9452 supportedFaceDetectMode);
9453
9454 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009455 /* supported mode should be OFF if the max number of faces is 0 */
9456 if (maxFaces <= 0) {
9457 supportedFaceDetectMode = 0;
9458 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009459 Vector<uint8_t> availableFaceDetectModes;
9460 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9461 if (supportedFaceDetectMode == 1) {
9462 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9463 } else if (supportedFaceDetectMode == 2) {
9464 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9465 } else if (supportedFaceDetectMode == 3) {
9466 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9467 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9468 } else {
9469 maxFaces = 0;
9470 }
9471 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9472 availableFaceDetectModes.array(),
9473 availableFaceDetectModes.size());
9474 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9475 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009476 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9477 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9478 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009479
9480 int32_t exposureCompensationRange[] = {
9481 gCamCapability[cameraId]->exposure_compensation_min,
9482 gCamCapability[cameraId]->exposure_compensation_max};
9483 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9484 exposureCompensationRange,
9485 sizeof(exposureCompensationRange)/sizeof(int32_t));
9486
9487 uint8_t lensFacing = (facingBack) ?
9488 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9489 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9490
9491 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9492 available_thumbnail_sizes,
9493 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9494
9495 /* All supported sizes are combined into this tag */
9496 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9497 /*android.scaler.availableStreamConfigurations*/
9498 Vector<int32_t> available_stream_configs;
9499 cam_dimension_t active_array_dim;
9500 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9501 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009502
9503 /* Advertise the list of supported input dimensions based on the property below.
9504 By default all sizes up to 5MP will be advertised.
9505 Note that the setprop resolution format should be WxH,
9506 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9507 To list all supported sizes, set the property to "0x0" */
9508 cam_dimension_t minInputSize = {2592,1944}; //5MP
9509 memset(prop, 0, sizeof(prop));
9510 property_get("persist.camera.input.minsize", prop, "2592x1944");
9511 if (strlen(prop) > 0) {
9512 char *saveptr = NULL;
9513 char *token = strtok_r(prop, "x", &saveptr);
9514 if (token != NULL) {
9515 minInputSize.width = atoi(token);
9516 }
9517 token = strtok_r(NULL, "x", &saveptr);
9518 if (token != NULL) {
9519 minInputSize.height = atoi(token);
9520 }
9521 }
9522
Thierry Strudel3d639192016-09-09 11:52:26 -07009523 /* Add input/output stream configurations for each scalar formats*/
9524 for (size_t j = 0; j < scalar_formats_count; j++) {
9525 switch (scalar_formats[j]) {
9526 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9527 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9528 case HAL_PIXEL_FORMAT_RAW10:
9529 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9530 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9531 addStreamConfig(available_stream_configs, scalar_formats[j],
9532 gCamCapability[cameraId]->raw_dim[i],
9533 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9534 }
9535 break;
9536 case HAL_PIXEL_FORMAT_BLOB:
9537 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9538 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9539 addStreamConfig(available_stream_configs, scalar_formats[j],
9540 gCamCapability[cameraId]->picture_sizes_tbl[i],
9541 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9542 }
9543 break;
9544 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9545 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9546 default:
9547 cam_dimension_t largest_picture_size;
9548 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9549 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9550 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9551 addStreamConfig(available_stream_configs, scalar_formats[j],
9552 gCamCapability[cameraId]->picture_sizes_tbl[i],
9553 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009554 /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009555 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9556 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009557 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9558 >= minInputSize.width) || (gCamCapability[cameraId]->
9559 picture_sizes_tbl[i].height >= minInputSize.height)) {
9560 addStreamConfig(available_stream_configs, scalar_formats[j],
9561 gCamCapability[cameraId]->picture_sizes_tbl[i],
9562 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9563 }
9564 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009565 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009566
Thierry Strudel3d639192016-09-09 11:52:26 -07009567 break;
9568 }
9569 }
9570
9571 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9572 available_stream_configs.array(), available_stream_configs.size());
9573 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9574 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9575
9576 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9577 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9578
9579 /* android.scaler.availableMinFrameDurations */
9580 Vector<int64_t> available_min_durations;
9581 for (size_t j = 0; j < scalar_formats_count; j++) {
9582 switch (scalar_formats[j]) {
9583 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9584 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9585 case HAL_PIXEL_FORMAT_RAW10:
9586 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9587 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9588 available_min_durations.add(scalar_formats[j]);
9589 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9590 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9591 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9592 }
9593 break;
9594 default:
9595 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9596 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9597 available_min_durations.add(scalar_formats[j]);
9598 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9599 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9600 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9601 }
9602 break;
9603 }
9604 }
9605 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9606 available_min_durations.array(), available_min_durations.size());
9607
9608 Vector<int32_t> available_hfr_configs;
9609 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9610 int32_t fps = 0;
9611 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9612 case CAM_HFR_MODE_60FPS:
9613 fps = 60;
9614 break;
9615 case CAM_HFR_MODE_90FPS:
9616 fps = 90;
9617 break;
9618 case CAM_HFR_MODE_120FPS:
9619 fps = 120;
9620 break;
9621 case CAM_HFR_MODE_150FPS:
9622 fps = 150;
9623 break;
9624 case CAM_HFR_MODE_180FPS:
9625 fps = 180;
9626 break;
9627 case CAM_HFR_MODE_210FPS:
9628 fps = 210;
9629 break;
9630 case CAM_HFR_MODE_240FPS:
9631 fps = 240;
9632 break;
9633 case CAM_HFR_MODE_480FPS:
9634 fps = 480;
9635 break;
9636 case CAM_HFR_MODE_OFF:
9637 case CAM_HFR_MODE_MAX:
9638 default:
9639 break;
9640 }
9641
9642 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9643 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9644 /* For each HFR frame rate, need to advertise one variable fps range
9645 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9646 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9647 * set by the app. When video recording is started, [120, 120] is
9648 * set. This way sensor configuration does not change when recording
9649 * is started */
9650
9651 /* (width, height, fps_min, fps_max, batch_size_max) */
9652 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9653 j < MAX_SIZES_CNT; j++) {
9654 available_hfr_configs.add(
9655 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9656 available_hfr_configs.add(
9657 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9658 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9659 available_hfr_configs.add(fps);
9660 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9661
9662 /* (width, height, fps_min, fps_max, batch_size_max) */
9663 available_hfr_configs.add(
9664 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9665 available_hfr_configs.add(
9666 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9667 available_hfr_configs.add(fps);
9668 available_hfr_configs.add(fps);
9669 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9670 }
9671 }
9672 }
9673 //Advertise HFR capability only if the property is set
9674 memset(prop, 0, sizeof(prop));
9675 property_get("persist.camera.hal3hfr.enable", prop, "1");
9676 uint8_t hfrEnable = (uint8_t)atoi(prop);
9677
9678 if(hfrEnable && available_hfr_configs.array()) {
9679 staticInfo.update(
9680 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9681 available_hfr_configs.array(), available_hfr_configs.size());
9682 }
9683
9684 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9685 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9686 &max_jpeg_size, 1);
9687
9688 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9689 size_t size = 0;
9690 count = CAM_EFFECT_MODE_MAX;
9691 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9692 for (size_t i = 0; i < count; i++) {
9693 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9694 gCamCapability[cameraId]->supported_effects[i]);
9695 if (NAME_NOT_FOUND != val) {
9696 avail_effects[size] = (uint8_t)val;
9697 size++;
9698 }
9699 }
9700 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9701 avail_effects,
9702 size);
9703
9704 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9705 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9706 size_t supported_scene_modes_cnt = 0;
9707 count = CAM_SCENE_MODE_MAX;
9708 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9709 for (size_t i = 0; i < count; i++) {
9710 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9711 CAM_SCENE_MODE_OFF) {
9712 int val = lookupFwkName(SCENE_MODES_MAP,
9713 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9714 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009715
Thierry Strudel3d639192016-09-09 11:52:26 -07009716 if (NAME_NOT_FOUND != val) {
9717 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9718 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9719 supported_scene_modes_cnt++;
9720 }
9721 }
9722 }
9723 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9724 avail_scene_modes,
9725 supported_scene_modes_cnt);
9726
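    // Each scene mode contributes a 3-entry override (AE mode, AWB mode, AF mode),
    // hence the *3 sizing below.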
9727 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9728 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9729 supported_scene_modes_cnt,
9730 CAM_SCENE_MODE_MAX,
9731 scene_mode_overrides,
9732 supported_indexes,
9733 cameraId);
9734
9735 if (supported_scene_modes_cnt == 0) {
9736 supported_scene_modes_cnt = 1;
9737 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9738 }
9739
9740 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9741 scene_mode_overrides, supported_scene_modes_cnt * 3);
9742
9743 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9744 ANDROID_CONTROL_MODE_AUTO,
9745 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9746 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9747 available_control_modes,
9748 3);
9749
9750 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9751 size = 0;
9752 count = CAM_ANTIBANDING_MODE_MAX;
9753 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9754 for (size_t i = 0; i < count; i++) {
9755 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9756 gCamCapability[cameraId]->supported_antibandings[i]);
9757 if (NAME_NOT_FOUND != val) {
9758 avail_antibanding_modes[size] = (uint8_t)val;
9759 size++;
9760 }
9761
9762 }
9763 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9764 avail_antibanding_modes,
9765 size);
9766
9767 uint8_t avail_abberation_modes[] = {
9768 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9769 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9770 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9771 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9772 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9773 if (0 == count) {
9774 // If no aberration correction modes are available for a device, advertise only the OFF mode
9775 size = 1;
9776 } else {
9777 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9778 // So, advertise all 3 modes if at least one mode is supported, as per the
9779 // new M requirement.
9780 size = 3;
9781 }
9782 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9783 avail_abberation_modes,
9784 size);
9785
9786 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9787 size = 0;
9788 count = CAM_FOCUS_MODE_MAX;
9789 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9790 for (size_t i = 0; i < count; i++) {
9791 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9792 gCamCapability[cameraId]->supported_focus_modes[i]);
9793 if (NAME_NOT_FOUND != val) {
9794 avail_af_modes[size] = (uint8_t)val;
9795 size++;
9796 }
9797 }
9798 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9799 avail_af_modes,
9800 size);
9801
9802 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9803 size = 0;
9804 count = CAM_WB_MODE_MAX;
9805 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9806 for (size_t i = 0; i < count; i++) {
9807 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9808 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9809 gCamCapability[cameraId]->supported_white_balances[i]);
9810 if (NAME_NOT_FOUND != val) {
9811 avail_awb_modes[size] = (uint8_t)val;
9812 size++;
9813 }
9814 }
9815 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9816 avail_awb_modes,
9817 size);
9818
9819 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9820 count = CAM_FLASH_FIRING_LEVEL_MAX;
9821 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9822 count);
9823 for (size_t i = 0; i < count; i++) {
9824 available_flash_levels[i] =
9825 gCamCapability[cameraId]->supported_firing_levels[i];
9826 }
9827 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9828 available_flash_levels, count);
9829
9830 uint8_t flashAvailable;
9831 if (gCamCapability[cameraId]->flash_available)
9832 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9833 else
9834 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9835 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9836 &flashAvailable, 1);
9837
9838 Vector<uint8_t> avail_ae_modes;
9839 count = CAM_AE_MODE_MAX;
9840 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9841 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009842 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9843 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9844 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9845 }
9846 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009847 }
9848 if (flashAvailable) {
9849 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9850 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9851 }
9852 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9853 avail_ae_modes.array(),
9854 avail_ae_modes.size());
9855
9856 int32_t sensitivity_range[2];
9857 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9858 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9859 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9860 sensitivity_range,
9861 sizeof(sensitivity_range) / sizeof(int32_t));
9862
9863 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9864 &gCamCapability[cameraId]->max_analog_sensitivity,
9865 1);
9866
9867 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9868 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9869 &sensor_orientation,
9870 1);
9871
9872 int32_t max_output_streams[] = {
9873 MAX_STALLING_STREAMS,
9874 MAX_PROCESSED_STREAMS,
9875 MAX_RAW_STREAMS};
9876 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9877 max_output_streams,
9878 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9879
9880 uint8_t avail_leds = 0;
9881 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9882 &avail_leds, 0);
9883
9884 uint8_t focus_dist_calibrated;
9885 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9886 gCamCapability[cameraId]->focus_dist_calibrated);
9887 if (NAME_NOT_FOUND != val) {
9888 focus_dist_calibrated = (uint8_t)val;
9889 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9890 &focus_dist_calibrated, 1);
9891 }
9892
9893 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9894 size = 0;
9895 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9896 MAX_TEST_PATTERN_CNT);
9897 for (size_t i = 0; i < count; i++) {
9898 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9899 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9900 if (NAME_NOT_FOUND != testpatternMode) {
9901 avail_testpattern_modes[size] = testpatternMode;
9902 size++;
9903 }
9904 }
9905 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9906 avail_testpattern_modes,
9907 size);
9908
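    // Maximum pipeline depth reported to the framework: in-flight request capacity plus the
    // empty-pipeline and frame-skip delays.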
9909 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9910 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9911 &max_pipeline_depth,
9912 1);
9913
9914 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9915 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9916 &partial_result_count,
9917 1);
9918
9919 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9920 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9921
9922 Vector<uint8_t> available_capabilities;
9923 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9924 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9925 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9926 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9927 if (supportBurst) {
9928 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9929 }
9930 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9931 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9932 if (hfrEnable && available_hfr_configs.array()) {
9933 available_capabilities.add(
9934 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9935 }
9936
9937 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9938 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9939 }
9940 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9941 available_capabilities.array(),
9942 available_capabilities.size());
9943
9944 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9945 //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9946 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9947 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9948
9949 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9950 &aeLockAvailable, 1);
9951
9952 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9953 //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9954 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9955 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9956
9957 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9958 &awbLockAvailable, 1);
9959
9960 int32_t max_input_streams = 1;
9961 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9962 &max_input_streams,
9963 1);
9964
9965 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9966 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9967 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9968 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9969 HAL_PIXEL_FORMAT_YCbCr_420_888};
9970 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9971 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9972
9973 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9974 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9975 &max_latency,
9976 1);
9977
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009978#ifndef USE_HAL_3_3
9979 int32_t isp_sensitivity_range[2];
9980 isp_sensitivity_range[0] =
9981 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9982 isp_sensitivity_range[1] =
9983 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9984 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9985 isp_sensitivity_range,
9986 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9987#endif
9988
Thierry Strudel3d639192016-09-09 11:52:26 -07009989 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9990 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9991 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9992 available_hot_pixel_modes,
9993 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9994
9995 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9996 ANDROID_SHADING_MODE_FAST,
9997 ANDROID_SHADING_MODE_HIGH_QUALITY};
9998 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9999 available_shading_modes,
10000 3);
10001
10002 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10003 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10004 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10005 available_lens_shading_map_modes,
10006 2);
10007
10008 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10009 ANDROID_EDGE_MODE_FAST,
10010 ANDROID_EDGE_MODE_HIGH_QUALITY,
10011 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10012 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10013 available_edge_modes,
10014 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10015
10016 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10017 ANDROID_NOISE_REDUCTION_MODE_FAST,
10018 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10019 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10020 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10021 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10022 available_noise_red_modes,
10023 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10024
10025 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10026 ANDROID_TONEMAP_MODE_FAST,
10027 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10028 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10029 available_tonemap_modes,
10030 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10031
10032 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10033 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10034 available_hot_pixel_map_modes,
10035 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10036
10037 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10038 gCamCapability[cameraId]->reference_illuminant1);
10039 if (NAME_NOT_FOUND != val) {
10040 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10041 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10042 }
10043
10044 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10045 gCamCapability[cameraId]->reference_illuminant2);
10046 if (NAME_NOT_FOUND != val) {
10047 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10048 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10049 }
10050
10051 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10052 (void *)gCamCapability[cameraId]->forward_matrix1,
10053 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10054
10055 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10056 (void *)gCamCapability[cameraId]->forward_matrix2,
10057 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10058
10059 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10060 (void *)gCamCapability[cameraId]->color_transform1,
10061 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10062
10063 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10064 (void *)gCamCapability[cameraId]->color_transform2,
10065 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10066
10067 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10068 (void *)gCamCapability[cameraId]->calibration_transform1,
10069 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10070
10071 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10072 (void *)gCamCapability[cameraId]->calibration_transform2,
10073 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10074
10075 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10076 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10077 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10078 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10079 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10080 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10081 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10082 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10083 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10084 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10085 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10086 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10087 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10088 ANDROID_JPEG_GPS_COORDINATES,
10089 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10090 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10091 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10092 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10093 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10094 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10095 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10096 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10097 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10098 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010099#ifndef USE_HAL_3_3
10100 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10101#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010102 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010103 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010104 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10105 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010106 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010107 /* DevCamDebug metadata request_keys_basic */
10108 DEVCAMDEBUG_META_ENABLE,
10109 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010110 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010111 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010112 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010113 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010114 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010115 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010116
10117 size_t request_keys_cnt =
10118 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10119 Vector<int32_t> available_request_keys;
10120 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10121 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10122 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10123 }
10124
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010125 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010126 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10127 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10128 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010129 }
10130
Thierry Strudel3d639192016-09-09 11:52:26 -070010131 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10132 available_request_keys.array(), available_request_keys.size());
10133
10134 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10135 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10136 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10137 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10138 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10139 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10140 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10141 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10142 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10143 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10144 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10145 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10146 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10147 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10148 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10149 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10150 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010151 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010152 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10153 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10154 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010155 ANDROID_STATISTICS_FACE_SCORES,
10156#ifndef USE_HAL_3_3
10157 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10158#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010159 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010160 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010161 // DevCamDebug metadata result_keys_basic
10162 DEVCAMDEBUG_META_ENABLE,
10163 // DevCamDebug metadata result_keys AF
10164 DEVCAMDEBUG_AF_LENS_POSITION,
10165 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10166 DEVCAMDEBUG_AF_TOF_DISTANCE,
10167 DEVCAMDEBUG_AF_LUMA,
10168 DEVCAMDEBUG_AF_HAF_STATE,
10169 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10170 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10171 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10172 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10173 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10174 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10175 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10176 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10177 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10178 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10179 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10180 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10181 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10182 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10183 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10184 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10185 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10186 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10187 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10188 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10189 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10190 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10191 // DevCamDebug metadata result_keys AEC
10192 DEVCAMDEBUG_AEC_TARGET_LUMA,
10193 DEVCAMDEBUG_AEC_COMP_LUMA,
10194 DEVCAMDEBUG_AEC_AVG_LUMA,
10195 DEVCAMDEBUG_AEC_CUR_LUMA,
10196 DEVCAMDEBUG_AEC_LINECOUNT,
10197 DEVCAMDEBUG_AEC_REAL_GAIN,
10198 DEVCAMDEBUG_AEC_EXP_INDEX,
10199 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010200 // DevCamDebug metadata result_keys zzHDR
10201 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10202 DEVCAMDEBUG_AEC_L_LINECOUNT,
10203 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10204 DEVCAMDEBUG_AEC_S_LINECOUNT,
10205 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10206 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10207 // DevCamDebug metadata result_keys ADRC
10208 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10209 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10210 DEVCAMDEBUG_AEC_GTM_RATIO,
10211 DEVCAMDEBUG_AEC_LTM_RATIO,
10212 DEVCAMDEBUG_AEC_LA_RATIO,
10213 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010214 // DevCamDebug metadata result_keys AWB
10215 DEVCAMDEBUG_AWB_R_GAIN,
10216 DEVCAMDEBUG_AWB_G_GAIN,
10217 DEVCAMDEBUG_AWB_B_GAIN,
10218 DEVCAMDEBUG_AWB_CCT,
10219 DEVCAMDEBUG_AWB_DECISION,
10220 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010221 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10222 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10223 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010224 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010225 };
10226
Thierry Strudel3d639192016-09-09 11:52:26 -070010227 size_t result_keys_cnt =
10228 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10229
10230 Vector<int32_t> available_result_keys;
10231 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10232 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10233 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10234 }
10235 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10236 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10237 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10238 }
10239 if (supportedFaceDetectMode == 1) {
10240 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10241 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10242 } else if ((supportedFaceDetectMode == 2) ||
10243 (supportedFaceDetectMode == 3)) {
10244 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10245 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10246 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010247#ifndef USE_HAL_3_3
10248 if (hasBlackRegions) {
10249 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10250 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10251 }
10252#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010253
10254 if (gExposeEnableZslKey) {
10255 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10256 }
10257
Thierry Strudel3d639192016-09-09 11:52:26 -070010258 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10259 available_result_keys.array(), available_result_keys.size());
10260
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010261 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010262 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10263 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10264 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10265 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10266 ANDROID_SCALER_CROPPING_TYPE,
10267 ANDROID_SYNC_MAX_LATENCY,
10268 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10269 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10270 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10271 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10272 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10273 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10274 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10275 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10276 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10277 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10278 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10279 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10280 ANDROID_LENS_FACING,
10281 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10282 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10283 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10284 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10285 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10286 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10287 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10288 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10289 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10290 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10291 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10292 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10293 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10294 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10295 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10296 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10297 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10298 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10299 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10300 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010301 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010302 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10303 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10304 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10305 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10306 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10307 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10308 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10309 ANDROID_CONTROL_AVAILABLE_MODES,
10310 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10311 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10312 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10313 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010314 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10315#ifndef USE_HAL_3_3
10316 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10317 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10318#endif
10319 };
10320
10321 Vector<int32_t> available_characteristics_keys;
10322 available_characteristics_keys.appendArray(characteristics_keys_basic,
10323 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10324#ifndef USE_HAL_3_3
10325 if (hasBlackRegions) {
10326 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10327 }
10328#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010329
10330 if (0 <= indexPD) {
10331 int32_t depthKeys[] = {
10332 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10333 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10334 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10335 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10336 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10337 };
10338 available_characteristics_keys.appendArray(depthKeys,
10339 sizeof(depthKeys) / sizeof(depthKeys[0]));
10340 }
10341
Thierry Strudel3d639192016-09-09 11:52:26 -070010342 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010343 available_characteristics_keys.array(),
10344 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010345
10346 /*available stall durations depend on the hw + sw and will be different for different devices */
10347 /*have to add for raw after implementation*/
10348 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10349 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10350
10351 Vector<int64_t> available_stall_durations;
10352 for (uint32_t j = 0; j < stall_formats_count; j++) {
10353 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10354 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10355 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10356 available_stall_durations.add(stall_formats[j]);
10357 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10358 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10359 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10360 }
10361 } else {
10362 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10363 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10364 available_stall_durations.add(stall_formats[j]);
10365 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10366 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10367 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10368 }
10369 }
10370 }
10371 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10372 available_stall_durations.array(),
10373 available_stall_durations.size());
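// Each entry appended above is a (format, width, height, stall duration in ns)
// quadruple, which is the flattened layout consumed by
// ANDROID_SCALER_AVAILABLE_STALL_DURATIONS. A single BLOB entry would look like,
// e.g., {HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 33333333} (values illustrative only).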
10374
10375 //QCAMERA3_OPAQUE_RAW
10376 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10377 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10378 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10379 case LEGACY_RAW:
10380 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10381 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10382 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10383 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10384 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10385 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10386 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10387 break;
10388 case MIPI_RAW:
10389 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10390 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10391 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10392 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10393 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10394 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10395 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10396 break;
10397 default:
10398 LOGE("unknown opaque_raw_format %d",
10399 gCamCapability[cameraId]->opaque_raw_fmt);
10400 break;
10401 }
10402 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10403
10404 Vector<int32_t> strides;
10405 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10406 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10407 cam_stream_buf_plane_info_t buf_planes;
10408 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10409 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10410 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10411 &gCamCapability[cameraId]->padding_info, &buf_planes);
10412 strides.add(buf_planes.plane_info.mp[0].stride);
10413 }
10414 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10415 strides.size());
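// Each entry appended above is a (width, height, stride) triple; this is the
// flattened layout published through QCAMERA3_OPAQUE_RAW_STRIDES, with the
// stride taken from the first plane reported by mm_stream_calc_offset_raw().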
10416
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010417 //TBD: remove the following line once backend advertises zzHDR in feature mask
10418 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010419 //Video HDR default
10420 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10421 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010422 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010423 int32_t vhdr_mode[] = {
10424 QCAMERA3_VIDEO_HDR_MODE_OFF,
10425 QCAMERA3_VIDEO_HDR_MODE_ON};
10426
10427 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10428 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10429 vhdr_mode, vhdr_mode_count);
10430 }
10431
Thierry Strudel3d639192016-09-09 11:52:26 -070010432 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10433 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10434 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10435
10436 uint8_t isMonoOnly =
10437 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10438 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10439 &isMonoOnly, 1);
10440
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010441#ifndef USE_HAL_3_3
10442 Vector<int32_t> opaque_size;
10443 for (size_t j = 0; j < scalar_formats_count; j++) {
10444 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10445 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10446 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10447 cam_stream_buf_plane_info_t buf_planes;
10448
10449 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10450 &gCamCapability[cameraId]->padding_info, &buf_planes);
10451
10452 if (rc == 0) {
10453 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10454 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10455 opaque_size.add(buf_planes.plane_info.frame_len);
10456 } else {
10457 LOGE("raw frame calculation failed!");
10458 }
10459 }
10460 }
10461 }
10462
10463 if ((opaque_size.size() > 0) &&
10464 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10465 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10466 else
10467 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10468#endif
10469
Thierry Strudel04e026f2016-10-10 11:27:36 -070010470 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10471 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10472 size = 0;
10473 count = CAM_IR_MODE_MAX;
10474 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10475 for (size_t i = 0; i < count; i++) {
10476 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10477 gCamCapability[cameraId]->supported_ir_modes[i]);
10478 if (NAME_NOT_FOUND != val) {
10479 avail_ir_modes[size] = (int32_t)val;
10480 size++;
10481 }
10482 }
10483 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10484 avail_ir_modes, size);
10485 }
10486
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010487 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10488 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10489 size = 0;
10490 count = CAM_AEC_CONVERGENCE_MAX;
10491 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10492 for (size_t i = 0; i < count; i++) {
10493 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10494 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10495 if (NAME_NOT_FOUND != val) {
10496 available_instant_aec_modes[size] = (int32_t)val;
10497 size++;
10498 }
10499 }
10500 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10501 available_instant_aec_modes, size);
10502 }
10503
Thierry Strudel54dc9782017-02-15 12:12:10 -080010504 int32_t sharpness_range[] = {
10505 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10506 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10507 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10508
10509 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10510 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10511 size = 0;
10512 count = CAM_BINNING_CORRECTION_MODE_MAX;
10513 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10514 for (size_t i = 0; i < count; i++) {
10515 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10516 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10517 gCamCapability[cameraId]->supported_binning_modes[i]);
10518 if (NAME_NOT_FOUND != val) {
10519 avail_binning_modes[size] = (int32_t)val;
10520 size++;
10521 }
10522 }
10523 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10524 avail_binning_modes, size);
10525 }
10526
10527 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10528 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10529 size = 0;
10530 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10531 for (size_t i = 0; i < count; i++) {
10532 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10533 gCamCapability[cameraId]->supported_aec_modes[i]);
10534 if (NAME_NOT_FOUND != val)
10535 available_aec_modes[size++] = val;
10536 }
10537 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10538 available_aec_modes, size);
10539 }
10540
10541 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10542 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10543 size = 0;
10544 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10545 for (size_t i = 0; i < count; i++) {
10546 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10547 gCamCapability[cameraId]->supported_iso_modes[i]);
10548 if (NAME_NOT_FOUND != val)
10549 available_iso_modes[size++] = val;
10550 }
10551 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10552 available_iso_modes, size);
10553 }
10554
10555 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010556 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010557 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10558 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10559 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10560
10561 int32_t available_saturation_range[4];
10562 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10563 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10564 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10565 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10566 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10567 available_saturation_range, 4);
10568
10569 uint8_t is_hdr_values[2];
10570 is_hdr_values[0] = 0;
10571 is_hdr_values[1] = 1;
10572 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10573 is_hdr_values, 2);
10574
10575 float is_hdr_confidence_range[2];
10576 is_hdr_confidence_range[0] = 0.0;
10577 is_hdr_confidence_range[1] = 1.0;
10578 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10579 is_hdr_confidence_range, 2);
10580
Emilian Peev0a972ef2017-03-16 10:25:53 +000010581 size_t eepromLength = strnlen(
10582 reinterpret_cast<const char *>(
10583 gCamCapability[cameraId]->eeprom_version_info),
10584 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10585 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010586 char easelInfo[] = ",E:N";
10587 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10588 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10589 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010590 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10591 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010592 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010593 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010594 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10595 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10596 }
10597
Thierry Strudel3d639192016-09-09 11:52:26 -070010598 gStaticMetadata[cameraId] = staticInfo.release();
10599 return rc;
10600}
10601
10602/*===========================================================================
10603 * FUNCTION : makeTable
10604 *
10605 * DESCRIPTION: make a table of sizes
10606 *
10607 * PARAMETERS : @dimTable : array of supported dimensions; @size : number of valid entries
10608 *   @max_size : maximum number of entries to copy
10609 *   @sizeTable : output array of flattened (width, height) pairs
10610 *==========================================================================*/
10611void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10612 size_t max_size, int32_t *sizeTable)
10613{
10614 size_t j = 0;
10615 if (size > max_size) {
10616 size = max_size;
10617 }
10618 for (size_t i = 0; i < size; i++) {
10619 sizeTable[j] = dimTable[i].width;
10620 sizeTable[j+1] = dimTable[i].height;
10621 j+=2;
10622 }
10623}
10624
10625/*===========================================================================
10626 * FUNCTION : makeFPSTable
10627 *
10628 * DESCRIPTION: make a table of fps ranges
10629 *
10630 * PARAMETERS : @fpsTable : fps range table; @size : number of valid entries; @max_size : copy limit
10631 *   @fpsRangesTable : output array of flattened (min_fps, max_fps) pairs
10632 *==========================================================================*/
10633void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10634 size_t max_size, int32_t *fpsRangesTable)
10635{
10636 size_t j = 0;
10637 if (size > max_size) {
10638 size = max_size;
10639 }
10640 for (size_t i = 0; i < size; i++) {
10641 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10642 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10643 j+=2;
10644 }
10645}
10646
10647/*===========================================================================
10648 * FUNCTION : makeOverridesList
10649 *
10650 * DESCRIPTION: make a list of scene mode overrides
10651 *
10652 * PARAMETERS : @overridesTable : scene mode override table reported by the backend
10653 *   @size / @max_size : valid entry count and copy limit; @supported_indexes : indices of the fwk-supported scene modes
10654 *   @overridesList : output list of (AE, AWB, AF) override triples; @camera_id : camera whose capabilities are consulted
10655 *==========================================================================*/
10656void QCamera3HardwareInterface::makeOverridesList(
10657 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10658 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10659{
10660 /*daemon will give a list of overrides for all scene modes.
10661 However we should send the fwk only the overrides for the scene modes
10662 supported by the framework*/
10663 size_t j = 0;
10664 if (size > max_size) {
10665 size = max_size;
10666 }
10667 size_t focus_count = CAM_FOCUS_MODE_MAX;
10668 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10669 focus_count);
10670 for (size_t i = 0; i < size; i++) {
10671 bool supt = false;
10672 size_t index = supported_indexes[i];
10673 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10674 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10675 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10676 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10677 overridesTable[index].awb_mode);
10678 if (NAME_NOT_FOUND != val) {
10679 overridesList[j+1] = (uint8_t)val;
10680 }
10681 uint8_t focus_override = overridesTable[index].af_mode;
10682 for (size_t k = 0; k < focus_count; k++) {
10683 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10684 supt = true;
10685 break;
10686 }
10687 }
10688 if (supt) {
10689 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10690 focus_override);
10691 if (NAME_NOT_FOUND != val) {
10692 overridesList[j+2] = (uint8_t)val;
10693 }
10694 } else {
10695 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10696 }
10697 j+=3;
10698 }
10699}
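// The list built above is consumed as consecutive (AE, AWB, AF) override
// triples, one per supported scene mode, matching the layout of
// ANDROID_CONTROL_SCENE_MODE_OVERRIDES. A sketch of how a reader would index it
// (illustrative only):
//
//     overridesList[3*i + 0]  // AE override for the i-th supported scene mode
//     overridesList[3*i + 1]  // AWB override
//     overridesList[3*i + 2]  // AF override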
10700
10701/*===========================================================================
10702 * FUNCTION : filterJpegSizes
10703 *
10704 * DESCRIPTION: Returns the supported JPEG sizes, keeping only sizes that are at least
10705 * as large as the active array dimensions divided by the maximum downscale factor
10706 *
10707 * PARAMETERS : @jpegSizes : output array of (width, height) pairs; @processedSizes : candidate sizes
10708 *   @maxCount : output capacity; @active_array_size / @downscale_factor : define the minimum acceptable size
10709 * RETURN : length of jpegSizes array
10710 *==========================================================================*/
10711
10712size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10713 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10714 uint8_t downscale_factor)
10715{
10716 if (0 == downscale_factor) {
10717 downscale_factor = 1;
10718 }
10719
10720 int32_t min_width = active_array_size.width / downscale_factor;
10721 int32_t min_height = active_array_size.height / downscale_factor;
10722 size_t jpegSizesCnt = 0;
10723 if (processedSizesCnt > maxCount) {
10724 processedSizesCnt = maxCount;
10725 }
10726 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10727 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10728 jpegSizes[jpegSizesCnt] = processedSizes[i];
10729 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10730 jpegSizesCnt += 2;
10731 }
10732 }
10733 return jpegSizesCnt;
10734}
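// A minimal usage sketch (hypothetical values, not part of the build): with a
// 4000x3000 active array and downscale_factor = 2, only candidates of at least
// 2000x1500 survive the filter.
//
//     int32_t processed[] = {4032, 3024, 1920, 1080, 640, 480};
//     int32_t jpeg[6];
//     size_t cnt = filterJpegSizes(jpeg, processed, 6, 6, activeArray, 2);
//     // cnt == 2 here: only the 4032x3024 pair is kept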
10735
10736/*===========================================================================
10737 * FUNCTION : computeNoiseModelEntryS
10738 *
10739 * DESCRIPTION: function to map a given sensitivity to the S noise
10740 * model parameters in the DNG noise model.
10741 *
10742 * PARAMETERS : sens : the sensor sensitivity
10743 *
10744 * RETURN : S (sensor amplification) noise
10745 *
10746 *==========================================================================*/
10747double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10748 double s = gCamCapability[mCameraId]->gradient_S * sens +
10749 gCamCapability[mCameraId]->offset_S;
10750 return ((s < 0.0) ? 0.0 : s);
10751}
10752
10753/*===========================================================================
10754 * FUNCTION : computeNoiseModelEntryO
10755 *
10756 * DESCRIPTION: function to map a given sensitivity to the O noise
10757 * model parameters in the DNG noise model.
10758 *
10759 * PARAMETERS : sens : the sensor sensitivity
10760 *
10761 * RETURN : O (sensor readout) noise
10762 *
10763 *==========================================================================*/
10764double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10765 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10766 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10767 1.0 : (1.0 * sens / max_analog_sens);
10768 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10769 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10770 return ((o < 0.0) ? 0.0 : o);
10771}
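// Both helpers above produce the S/O coefficients of the DNG noise model used by
// tags such as ANDROID_SENSOR_NOISE_PROFILE, where the per-pixel noise variance
// at signal level x is modeled as approximately S * x + O: S scales with analog
// sensitivity, while O captures readout noise and, per the code above, grows with
// the square of any digital gain applied beyond the analog limit. A rough numeric
// sketch (made-up coefficients, for illustration only):
//
//     // gradient_S = 3.0e-6, offset_S = 1.0e-6, sens = 400
//     // S = 3.0e-6 * 400 + 1.0e-6 = 1.201e-3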
10772
10773/*===========================================================================
10774 * FUNCTION : getSensorSensitivity
10775 *
10776 * DESCRIPTION: convert iso_mode to an integer value
10777 *
10778 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10779 *
10780 * RETURN : sensitivity supported by sensor
10781 *
10782 *==========================================================================*/
10783int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10784{
10785 int32_t sensitivity;
10786
10787 switch (iso_mode) {
10788 case CAM_ISO_MODE_100:
10789 sensitivity = 100;
10790 break;
10791 case CAM_ISO_MODE_200:
10792 sensitivity = 200;
10793 break;
10794 case CAM_ISO_MODE_400:
10795 sensitivity = 400;
10796 break;
10797 case CAM_ISO_MODE_800:
10798 sensitivity = 800;
10799 break;
10800 case CAM_ISO_MODE_1600:
10801 sensitivity = 1600;
10802 break;
10803 default:
10804 sensitivity = -1;
10805 break;
10806 }
10807 return sensitivity;
10808}
10809
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010810int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010811 if (gEaselManagerClient == nullptr) {
10812 gEaselManagerClient = EaselManagerClient::create();
10813 if (gEaselManagerClient == nullptr) {
10814 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10815 return -ENODEV;
10816 }
10817 }
10818
10819 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010820 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10821 // to connect to Easel.
10822 bool doNotpowerOnEasel =
10823 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10824
10825 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010826 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10827 return OK;
10828 }
10829
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010830 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010831 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010832 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010833 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010834 return res;
10835 }
10836
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010837 EaselManagerClientOpened = true;
10838
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010839 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010840 if (res != OK) {
10841 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10842 }
10843
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010844 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010845 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010846
10847 // Expose enableZsl key only when HDR+ mode is enabled.
10848 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010849 }
10850
10851 return OK;
10852}
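// For local experimentation, the behavior above is driven by the system
// properties referenced in the code; a rough sketch of toggling them via adb
// (illustrative, exact effect depends on the build and on when the camera
// provider is restarted):
//
//     adb shell setprop persist.camera.hdrplus.enable 1      # full HDR+ instead of bypass-only
//     adb shell setprop persist.camera.hdrplus.profiling 1   # enable HDR+ profiling
//     adb shell setprop camera.hdrplus.donotpoweroneasel 1   # leave Easel powered off for tests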
10853
Thierry Strudel3d639192016-09-09 11:52:26 -070010854/*===========================================================================
10855 * FUNCTION : getCamInfo
10856 *
10857 * DESCRIPTION: query camera capabilities
10858 *
10859 * PARAMETERS :
10860 * @cameraId : camera Id
10861 * @info : camera info struct to be filled in with camera capabilities
10862 *
10863 * RETURN : int type of status
10864 * NO_ERROR -- success
10865 * non-zero failure code
10866 *==========================================================================*/
10867int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10868 struct camera_info *info)
10869{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010870 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010871 int rc = 0;
10872
10873 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010874
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010875 {
10876 Mutex::Autolock l(gHdrPlusClientLock);
10877 rc = initHdrPlusClientLocked();
10878 if (rc != OK) {
10879 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10880 pthread_mutex_unlock(&gCamLock);
10881 return rc;
10882 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010883 }
10884
Thierry Strudel3d639192016-09-09 11:52:26 -070010885 if (NULL == gCamCapability[cameraId]) {
10886 rc = initCapabilities(cameraId);
10887 if (rc < 0) {
10888 pthread_mutex_unlock(&gCamLock);
10889 return rc;
10890 }
10891 }
10892
10893 if (NULL == gStaticMetadata[cameraId]) {
10894 rc = initStaticMetadata(cameraId);
10895 if (rc < 0) {
10896 pthread_mutex_unlock(&gCamLock);
10897 return rc;
10898 }
10899 }
10900
10901 switch(gCamCapability[cameraId]->position) {
10902 case CAM_POSITION_BACK:
10903 case CAM_POSITION_BACK_AUX:
10904 info->facing = CAMERA_FACING_BACK;
10905 break;
10906
10907 case CAM_POSITION_FRONT:
10908 case CAM_POSITION_FRONT_AUX:
10909 info->facing = CAMERA_FACING_FRONT;
10910 break;
10911
10912 default:
10913 LOGE("Unknown position type %d for camera id:%d",
10914 gCamCapability[cameraId]->position, cameraId);
10915 rc = -1;
10916 break;
10917 }
10918
10919
10920 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010921#ifndef USE_HAL_3_3
10922 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10923#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010924 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010925#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010926 info->static_camera_characteristics = gStaticMetadata[cameraId];
10927
10928 //For now assume both cameras can operate independently.
10929 info->conflicting_devices = NULL;
10930 info->conflicting_devices_length = 0;
10931
10932 //resource cost is 100 * MIN(1.0, m/M),
10933 //where m is the throughput requirement with the maximum stream configuration
10934 //and M is the CPP maximum throughput.
10935 float max_fps = 0.0;
10936 for (uint32_t i = 0;
10937 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10938 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10939 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10940 }
10941 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10942 gCamCapability[cameraId]->active_array_size.width *
10943 gCamCapability[cameraId]->active_array_size.height * max_fps /
10944 gCamCapability[cameraId]->max_pixel_bandwidth;
10945 info->resource_cost = 100 * MIN(1.0, ratio);
10946 LOGI("camera %d resource cost is %d", cameraId,
10947 info->resource_cost);
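// Rough worked example (hypothetical numbers): with a 4032x3024 active array,
// a 30 fps peak and MAX_PROCESSED_STREAMS processed streams, m is roughly
// MAX_PROCESSED_STREAMS * 4032 * 3024 * 30 pixels/s; if that came to half of
// max_pixel_bandwidth, the reported resource_cost would be 50.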
10948
10949 pthread_mutex_unlock(&gCamLock);
10950 return rc;
10951}
10952
10953/*===========================================================================
10954 * FUNCTION : translateCapabilityToMetadata
10955 *
10956 * DESCRIPTION: translate the capability into camera_metadata_t
10957 *
10958 * PARAMETERS : type of the request
10959 *
10960 *
10961 * RETURN : success: camera_metadata_t*
10962 * failure: NULL
10963 *
10964 *==========================================================================*/
10965camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10966{
10967 if (mDefaultMetadata[type] != NULL) {
10968 return mDefaultMetadata[type];
10969 }
10970 //first time we are handling this request
10971 //fill up the metadata structure using the wrapper class
10972 CameraMetadata settings;
10973 //translate from cam_capability_t to camera_metadata_tag_t
10974 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10975 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10976 int32_t defaultRequestID = 0;
10977 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10978
10979 /* OIS disable */
10980 char ois_prop[PROPERTY_VALUE_MAX];
10981 memset(ois_prop, 0, sizeof(ois_prop));
10982 property_get("persist.camera.ois.disable", ois_prop, "0");
10983 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10984
10985 /* Force video to use OIS */
10986 char videoOisProp[PROPERTY_VALUE_MAX];
10987 memset(videoOisProp, 0, sizeof(videoOisProp));
10988 property_get("persist.camera.ois.video", videoOisProp, "1");
10989 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010990
10991 // Hybrid AE enable/disable
10992 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10993 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10994 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10995 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10996
Thierry Strudel3d639192016-09-09 11:52:26 -070010997 uint8_t controlIntent = 0;
10998 uint8_t focusMode;
10999 uint8_t vsMode;
11000 uint8_t optStabMode;
11001 uint8_t cacMode;
11002 uint8_t edge_mode;
11003 uint8_t noise_red_mode;
11004 uint8_t tonemap_mode;
11005 bool highQualityModeEntryAvailable = FALSE;
11006 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011007 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011008 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11009 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011010 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011011 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011012 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011013
Thierry Strudel3d639192016-09-09 11:52:26 -070011014 switch (type) {
11015 case CAMERA3_TEMPLATE_PREVIEW:
11016 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11017 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11018 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11019 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11020 edge_mode = ANDROID_EDGE_MODE_FAST;
11021 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11022 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11023 break;
11024 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11025 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11026 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11027 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11028 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11029 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11030 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11031 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11032 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11033 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11034 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11035 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11036 highQualityModeEntryAvailable = TRUE;
11037 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11038 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11039 fastModeEntryAvailable = TRUE;
11040 }
11041 }
11042 if (highQualityModeEntryAvailable) {
11043 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11044 } else if (fastModeEntryAvailable) {
11045 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11046 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011047 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11048 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11049 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011050 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011051 break;
11052 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11053 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11054 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11055 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011056 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11057 edge_mode = ANDROID_EDGE_MODE_FAST;
11058 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11059 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11060 if (forceVideoOis)
11061 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11062 break;
11063 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11064 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11065 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11066 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011067 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11068 edge_mode = ANDROID_EDGE_MODE_FAST;
11069 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11070 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11071 if (forceVideoOis)
11072 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11073 break;
11074 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11075 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11076 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11077 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11078 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11079 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11080 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11081 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11082 break;
11083 case CAMERA3_TEMPLATE_MANUAL:
11084 edge_mode = ANDROID_EDGE_MODE_FAST;
11085 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11086 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11087 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11088 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11089 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11090 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11091 break;
11092 default:
11093 edge_mode = ANDROID_EDGE_MODE_FAST;
11094 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11095 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11096 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11097 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11098 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11099 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11100 break;
11101 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011102 // Set CAC to OFF if the underlying device doesn't support it
11103 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11104 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11105 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011106 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11107 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11108 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11109 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11110 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11111 }
11112 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011113 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011114 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011115
11116 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11117 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11118 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11119 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11120 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11121 || ois_disable)
11122 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11123 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011124 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011125
11126 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11127 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11128
11129 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11130 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11131
11132 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11133 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11134
11135 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11136 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11137
11138 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11139 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11140
11141 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11142 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11143
11144 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11145 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11146
11147 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11148 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11149
11150 /*flash*/
11151 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11152 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11153
11154 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11155 settings.update(ANDROID_FLASH_FIRING_POWER,
11156 &flashFiringLevel, 1);
11157
11158 /* lens */
11159 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11160 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11161
11162 if (gCamCapability[mCameraId]->filter_densities_count) {
11163 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11164 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11165 gCamCapability[mCameraId]->filter_densities_count);
11166 }
11167
11168 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11169 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11170
Thierry Strudel3d639192016-09-09 11:52:26 -070011171 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11172 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11173
11174 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11175 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11176
11177 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11178 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11179
11180 /* face detection (default to OFF) */
11181 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11182 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11183
Thierry Strudel54dc9782017-02-15 12:12:10 -080011184 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11185 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011186
11187 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11188 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11189
11190 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11191 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11192
Thierry Strudel3d639192016-09-09 11:52:26 -070011193
11194 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11195 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11196
11197 /* Exposure time(Update the Min Exposure Time)*/
11198 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11199 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11200
11201 /* frame duration */
11202 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11203 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11204
11205 /* sensitivity */
11206 static const int32_t default_sensitivity = 100;
11207 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011208#ifndef USE_HAL_3_3
11209 static const int32_t default_isp_sensitivity =
11210 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11211 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11212#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011213
11214 /*edge mode*/
11215 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11216
11217 /*noise reduction mode*/
11218 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11219
11220 /*color correction mode*/
11221 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11222 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11223
11224 /*transform matrix mode*/
11225 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11226
11227 int32_t scaler_crop_region[4];
11228 scaler_crop_region[0] = 0;
11229 scaler_crop_region[1] = 0;
11230 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11231 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11232 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11233
11234 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11235 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11236
11237 /*focus distance*/
11238 float focus_distance = 0.0;
11239 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11240
11241 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011242 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011243 float max_range = 0.0;
11244 float max_fixed_fps = 0.0;
11245 int32_t fps_range[2] = {0, 0};
11246 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11247 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011248 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11249 TEMPLATE_MAX_PREVIEW_FPS) {
11250 continue;
11251 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011252 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11253 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11254 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11255 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11256 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11257 if (range > max_range) {
11258 fps_range[0] =
11259 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11260 fps_range[1] =
11261 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11262 max_range = range;
11263 }
11264 } else {
11265 if (range < 0.01 && max_fixed_fps <
11266 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11267 fps_range[0] =
11268 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11269 fps_range[1] =
11270 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11271 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11272 }
11273 }
11274 }
11275 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
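// Illustrative example (hypothetical fps table): given {[15,30], [30,30],
// [7.5,60]} and a 30 fps template cap, preview/still/ZSL templates would pick
// [15,30] (the widest range within the cap), while video templates would pick
// [30,30] (the highest fixed-fps range).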
11276
11277 /*precapture trigger*/
11278 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11279 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11280
11281 /*af trigger*/
11282 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11283 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11284
11285 /* ae & af regions */
11286 int32_t active_region[] = {
11287 gCamCapability[mCameraId]->active_array_size.left,
11288 gCamCapability[mCameraId]->active_array_size.top,
11289 gCamCapability[mCameraId]->active_array_size.left +
11290 gCamCapability[mCameraId]->active_array_size.width,
11291 gCamCapability[mCameraId]->active_array_size.top +
11292 gCamCapability[mCameraId]->active_array_size.height,
11293 0};
11294 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11295 sizeof(active_region) / sizeof(active_region[0]));
11296 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11297 sizeof(active_region) / sizeof(active_region[0]));
11298
11299 /* black level lock */
11300 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11301 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11302
Thierry Strudel3d639192016-09-09 11:52:26 -070011303 //special defaults for manual template
11304 if (type == CAMERA3_TEMPLATE_MANUAL) {
11305 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11306 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11307
11308 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11309 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11310
11311 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11312 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11313
11314 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11315 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11316
11317 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11318 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11319
11320 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11321 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11322 }
11323
11324
11325 /* TNR
11326 * We use this location to decide for which templates TNR is enabled.
11327 * TNR is turned on if either the preview or the video stream requires it.
11328 * This is not to be confused with per-stream linking; that decision is still
11329 * made per session and is handled as part of stream configuration.
11330 */
11331 uint8_t tnr_enable = 0;
11332
11333 if (m_bTnrPreview || m_bTnrVideo) {
11334
11335 switch (type) {
11336 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11337 tnr_enable = 1;
11338 break;
11339
11340 default:
11341 tnr_enable = 0;
11342 break;
11343 }
11344
11345 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11346 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11347 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11348
11349 LOGD("TNR:%d with process plate %d for template:%d",
11350 tnr_enable, tnr_process_type, type);
11351 }
11352
11353 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011354 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011355 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11356
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011357 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011358 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11359
Shuzhen Wang920ea402017-05-03 08:49:39 -070011360 uint8_t related_camera_id = mCameraId;
11361 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011362
11363 /* CDS default */
11364 char prop[PROPERTY_VALUE_MAX];
11365 memset(prop, 0, sizeof(prop));
11366 property_get("persist.camera.CDS", prop, "Auto");
11367 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11368 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11369 if (CAM_CDS_MODE_MAX == cds_mode) {
11370 cds_mode = CAM_CDS_MODE_AUTO;
11371 }
11372
11373 /* Disable CDS in templates that have TNR enabled */
11374 if (tnr_enable)
11375 cds_mode = CAM_CDS_MODE_OFF;
11376
11377 int32_t mode = cds_mode;
11378 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011379
Thierry Strudel269c81a2016-10-12 12:13:59 -070011380 /* Manual Convergence AEC Speed is disabled by default*/
11381 float default_aec_speed = 0;
11382 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11383
11384 /* Manual Convergence AWB Speed is disabled by default*/
11385 float default_awb_speed = 0;
11386 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11387
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011388 // Set instant AEC to normal convergence by default
11389 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11390 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11391
Shuzhen Wang19463d72016-03-08 11:09:52 -080011392 /* hybrid ae */
11393 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11394
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011395 if (gExposeEnableZslKey) {
11396 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11397 }
11398
Thierry Strudel3d639192016-09-09 11:52:26 -070011399 mDefaultMetadata[type] = settings.release();
11400
11401 return mDefaultMetadata[type];
11402}
11403
11404/*===========================================================================
11405 * FUNCTION : setFrameParameters
11406 *
11407 * DESCRIPTION: set parameters per frame as requested in the metadata from
11408 * framework
11409 *
11410 * PARAMETERS :
11411 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011412 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011413 * @blob_request: Whether this request is a blob request or not
11414 *
11415 * RETURN : success: NO_ERROR
11416 * failure:
11417 *==========================================================================*/
11418int QCamera3HardwareInterface::setFrameParameters(
11419 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011420 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011421 int blob_request,
11422 uint32_t snapshotStreamId)
11423{
11424 /*translate from camera_metadata_t type to parm_type_t*/
11425 int rc = 0;
11426 int32_t hal_version = CAM_HAL_V3;
11427
11428 clear_metadata_buffer(mParameters);
11429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11430 LOGE("Failed to set hal version in the parameters");
11431 return BAD_VALUE;
11432 }
11433
11434 /*we need to update the frame number in the parameters*/
11435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11436 request->frame_number)) {
11437 LOGE("Failed to set the frame number in the parameters");
11438 return BAD_VALUE;
11439 }
11440
11441 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011442 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011443 LOGE("Failed to set stream type mask in the parameters");
11444 return BAD_VALUE;
11445 }
11446
11447 if (mUpdateDebugLevel) {
11448 uint32_t dummyDebugLevel = 0;
11449 /* The value of dummyDebugLevel is irrelevant. On
11450 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read. */
11451 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11452 dummyDebugLevel)) {
11453 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11454 return BAD_VALUE;
11455 }
11456 mUpdateDebugLevel = false;
11457 }
11458
11459 if(request->settings != NULL){
11460 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11461 if (blob_request)
11462 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11463 }
11464
11465 return rc;
11466}
11467
11468/*===========================================================================
11469 * FUNCTION : setReprocParameters
11470 *
11471 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11472 * return it.
11473 *
11474 * PARAMETERS :
11475 * @request : request that needs to be serviced
11476 *
11477 * RETURN : success: NO_ERROR
11478 * failure:
11479 *==========================================================================*/
11480int32_t QCamera3HardwareInterface::setReprocParameters(
11481 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11482 uint32_t snapshotStreamId)
11483{
11484 /*translate from camera_metadata_t type to parm_type_t*/
11485 int rc = 0;
11486
11487 if (NULL == request->settings){
11488 LOGE("Reprocess settings cannot be NULL");
11489 return BAD_VALUE;
11490 }
11491
11492 if (NULL == reprocParam) {
11493 LOGE("Invalid reprocessing metadata buffer");
11494 return BAD_VALUE;
11495 }
11496 clear_metadata_buffer(reprocParam);
11497
11498 /*we need to update the frame number in the parameters*/
11499 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11500 request->frame_number)) {
11501 LOGE("Failed to set the frame number in the parameters");
11502 return BAD_VALUE;
11503 }
11504
11505 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11506 if (rc < 0) {
11507 LOGE("Failed to translate reproc request");
11508 return rc;
11509 }
11510
11511 CameraMetadata frame_settings;
11512 frame_settings = request->settings;
11513 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11514 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11515 int32_t *crop_count =
11516 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11517 int32_t *crop_data =
11518 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11519 int32_t *roi_map =
11520 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11521 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11522 cam_crop_data_t crop_meta;
11523 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11524 crop_meta.num_of_streams = 1;
11525 crop_meta.crop_info[0].crop.left = crop_data[0];
11526 crop_meta.crop_info[0].crop.top = crop_data[1];
11527 crop_meta.crop_info[0].crop.width = crop_data[2];
11528 crop_meta.crop_info[0].crop.height = crop_data[3];
11529
11530 crop_meta.crop_info[0].roi_map.left =
11531 roi_map[0];
11532 crop_meta.crop_info[0].roi_map.top =
11533 roi_map[1];
11534 crop_meta.crop_info[0].roi_map.width =
11535 roi_map[2];
11536 crop_meta.crop_info[0].roi_map.height =
11537 roi_map[3];
11538
11539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11540 rc = BAD_VALUE;
11541 }
11542 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11543 request->input_buffer->stream,
11544 crop_meta.crop_info[0].crop.left,
11545 crop_meta.crop_info[0].crop.top,
11546 crop_meta.crop_info[0].crop.width,
11547 crop_meta.crop_info[0].crop.height);
11548 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11549 request->input_buffer->stream,
11550 crop_meta.crop_info[0].roi_map.left,
11551 crop_meta.crop_info[0].roi_map.top,
11552 crop_meta.crop_info[0].roi_map.width,
11553 crop_meta.crop_info[0].roi_map.height);
11554 } else {
11555 LOGE("Invalid reprocess crop count %d!", *crop_count);
11556 }
11557 } else {
11558 LOGE("No crop data from matching output stream");
11559 }
11560
11561 /* These settings are not needed for regular requests, so handle them specially for
11562 reprocess requests; they carry information needed for the EXIF tags */
11563 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11564 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11565 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11566 if (NAME_NOT_FOUND != val) {
11567 uint32_t flashMode = (uint32_t)val;
11568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11569 rc = BAD_VALUE;
11570 }
11571 } else {
11572 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11573 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11574 }
11575 } else {
11576 LOGH("No flash mode in reprocess settings");
11577 }
11578
11579 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11580 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11581 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11582 rc = BAD_VALUE;
11583 }
11584 } else {
11585 LOGH("No flash state in reprocess settings");
11586 }
11587
11588 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11589 uint8_t *reprocessFlags =
11590 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11591 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11592 *reprocessFlags)) {
11593 rc = BAD_VALUE;
11594 }
11595 }
11596
Thierry Strudel54dc9782017-02-15 12:12:10 -080011597 // Add exif debug data to internal metadata
11598 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11599 mm_jpeg_debug_exif_params_t *debug_params =
11600 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11601 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11602 // AE
11603 if (debug_params->ae_debug_params_valid == TRUE) {
11604 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11605 debug_params->ae_debug_params);
11606 }
11607 // AWB
11608 if (debug_params->awb_debug_params_valid == TRUE) {
11609 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11610 debug_params->awb_debug_params);
11611 }
11612 // AF
11613 if (debug_params->af_debug_params_valid == TRUE) {
11614 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11615 debug_params->af_debug_params);
11616 }
11617 // ASD
11618 if (debug_params->asd_debug_params_valid == TRUE) {
11619 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11620 debug_params->asd_debug_params);
11621 }
11622 // Stats
11623 if (debug_params->stats_debug_params_valid == TRUE) {
11624 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11625 debug_params->stats_debug_params);
11626 }
11627 // BE Stats
11628 if (debug_params->bestats_debug_params_valid == TRUE) {
11629 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11630 debug_params->bestats_debug_params);
11631 }
11632 // BHIST
11633 if (debug_params->bhist_debug_params_valid == TRUE) {
11634 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11635 debug_params->bhist_debug_params);
11636 }
11637 // 3A Tuning
11638 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11639 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11640 debug_params->q3a_tuning_debug_params);
11641 }
11642 }
11643
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011644 // Add metadata which reprocess needs
11645 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11646 cam_reprocess_info_t *repro_info =
11647 (cam_reprocess_info_t *)frame_settings.find
11648 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011649 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011650 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011651 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011652 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011653 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011654 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011655 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011656 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011657 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011658 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011659 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011660 repro_info->pipeline_flip);
11661 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11662 repro_info->af_roi);
11663 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11664 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011665 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11666 CAM_INTF_PARM_ROTATION metadata has already been added in
11667 translateToHalMetadata and HAL needs to keep that new rotation
11668 metadata. Otherwise, the old rotation info saved in the vendor tag
11669 is used */
11670 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11671 CAM_INTF_PARM_ROTATION, reprocParam) {
11672 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11673 } else {
11674 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011675 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011676 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011677 }
11678
11679 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11680 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11681 roi.width and roi.height are the final JPEG size.
11682 For now, HAL only checks this for reprocess requests */
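    /* Illustrative sketch (not part of the original source): an application or
     * framework client could request this crop through the vendor tags roughly
     * as follows, with `settings` being the CameraMetadata of the reprocess
     * request; the numeric values are hypothetical.
     *
     *     uint8_t enable = 1;
     *     int32_t cropRect[4] = {0, 0, 3264, 2448};  // left, top, width, height of the crop
     *     int32_t cropRoi[4]  = {0, 0, 1920, 1080};  // roi[2]/roi[3] become the final JPEG size
     *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
     *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
     *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
     */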
11683 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11684 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11685 uint8_t *enable =
11686 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11687 if (*enable == TRUE) {
11688 int32_t *crop_data =
11689 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11690 cam_stream_crop_info_t crop_meta;
11691 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11692 crop_meta.stream_id = 0;
11693 crop_meta.crop.left = crop_data[0];
11694 crop_meta.crop.top = crop_data[1];
11695 crop_meta.crop.width = crop_data[2];
11696 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011697 // The JPEG crop roi should match cpp output size
11698 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11699 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11700 crop_meta.roi_map.left = 0;
11701 crop_meta.roi_map.top = 0;
11702 crop_meta.roi_map.width = cpp_crop->crop.width;
11703 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011704 }
11705 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11706 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011707 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011708 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011709 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11710 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011711 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011712 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11713
11714 // Add JPEG scale information
11715 cam_dimension_t scale_dim;
11716 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11717 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11718 int32_t *roi =
11719 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11720 scale_dim.width = roi[2];
11721 scale_dim.height = roi[3];
11722 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11723 scale_dim);
11724 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11725 scale_dim.width, scale_dim.height, mCameraId);
11726 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011727 }
11728 }
11729
11730 return rc;
11731}
11732
11733/*===========================================================================
11734 * FUNCTION : saveRequestSettings
11735 *
11736 * DESCRIPTION: Add any settings that might have changed to the request settings
11737 * and save the settings to be applied on the frame
11738 *
11739 * PARAMETERS :
11740 * @jpegMetadata : the extracted and/or modified jpeg metadata
11741 * @request : request with initial settings
11742 *
11743 * RETURN :
11744 * camera_metadata_t* : pointer to the saved request settings
11745 *==========================================================================*/
11746camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11747 const CameraMetadata &jpegMetadata,
11748 camera3_capture_request_t *request)
11749{
11750 camera_metadata_t *resultMetadata;
11751 CameraMetadata camMetadata;
11752 camMetadata = request->settings;
11753
11754 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11755 int32_t thumbnail_size[2];
11756 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11757 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11758 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11759 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11760 }
11761
11762 if (request->input_buffer != NULL) {
11763 uint8_t reprocessFlags = 1;
11764 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11765 (uint8_t*)&reprocessFlags,
11766 sizeof(reprocessFlags));
11767 }
11768
11769 resultMetadata = camMetadata.release();
11770 return resultMetadata;
11771}
11772
11773/*===========================================================================
11774 * FUNCTION : setHalFpsRange
11775 *
11776 * DESCRIPTION: set FPS range parameter
11777 *
11778 *
11779 * PARAMETERS :
11780 * @settings : Metadata from framework
11781 * @hal_metadata: Metadata buffer
11782 *
11783 *
11784 * RETURN : success: NO_ERROR
11785 * failure: BAD_VALUE
11786 *==========================================================================*/
11787int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11788 metadata_buffer_t *hal_metadata)
11789{
11790 int32_t rc = NO_ERROR;
11791 cam_fps_range_t fps_range;
11792 fps_range.min_fps = (float)
11793 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11794 fps_range.max_fps = (float)
11795 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11796 fps_range.video_min_fps = fps_range.min_fps;
11797 fps_range.video_max_fps = fps_range.max_fps;
11798
11799 LOGD("aeTargetFpsRange fps: [%f %f]",
11800 fps_range.min_fps, fps_range.max_fps);
11801 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11802 * follows:
11803 * ---------------------------------------------------------------|
11804 * Video stream is absent in configure_streams |
11805 * (Camcorder preview before the first video record) |
11806 * ---------------------------------------------------------------|
11807 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11808 * | | | vid_min/max_fps|
11809 * ---------------------------------------------------------------|
11810 * NO | [ 30, 240] | 240 | [240, 240] |
11811 * |-------------|-------------|----------------|
11812 * | [240, 240] | 240 | [240, 240] |
11813 * ---------------------------------------------------------------|
11814 * Video stream is present in configure_streams |
11815 * ---------------------------------------------------------------|
11816 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11817 * | | | vid_min/max_fps|
11818 * ---------------------------------------------------------------|
11819 * NO | [ 30, 240] | 240 | [240, 240] |
11820 * (camcorder prev |-------------|-------------|----------------|
11821 * after video rec | [240, 240] | 240 | [240, 240] |
11822 * is stopped) | | | |
11823 * ---------------------------------------------------------------|
11824 * YES | [ 30, 240] | 240 | [240, 240] |
11825 * |-------------|-------------|----------------|
11826 * | [240, 240] | 240 | [240, 240] |
11827 * ---------------------------------------------------------------|
11828 * When Video stream is absent in configure_streams,
11829 * preview fps = sensor_fps / batchsize
11830 * Eg: for 240fps at batchSize 4, preview = 60fps
11831 * for 120fps at batchSize 4, preview = 30fps
11832 *
11833 * When video stream is present in configure_streams, preview fps is as per
11834 * the ratio of preview buffers to video buffers requested in process
11835 * capture request
11836 */
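    /* Worked example (a sketch, assuming PREVIEW_FPS_FOR_HFR is 30 and
     * MAX_HFR_BATCH_SIZE is at least 8): for aeTargetFpsRange [240, 240] in
     * constrained high-speed mode, the code below looks up the HFR mode for
     * 240 fps, sets mHFRVideoFps = 240 and mBatchSize = 240 / 30 = 8, so one
     * 8-frame batch is produced per preview frame. If the computed value
     * exceeded MAX_HFR_BATCH_SIZE it would be clamped to that limit. */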
11837 mBatchSize = 0;
11838 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11839 fps_range.min_fps = fps_range.video_max_fps;
11840 fps_range.video_min_fps = fps_range.video_max_fps;
11841 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11842 fps_range.max_fps);
11843 if (NAME_NOT_FOUND != val) {
11844 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11846 return BAD_VALUE;
11847 }
11848
11849 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11850 /* If batchmode is currently in progress and the fps changes,
11851 * set the flag to restart the sensor */
11852 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11853 (mHFRVideoFps != fps_range.max_fps)) {
11854 mNeedSensorRestart = true;
11855 }
11856 mHFRVideoFps = fps_range.max_fps;
11857 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11858 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11859 mBatchSize = MAX_HFR_BATCH_SIZE;
11860 }
11861 }
11862 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11863
11864 }
11865 } else {
11866 /* HFR mode is a session parameter in the backend/ISP. It should be reset
11867 * when not in HFR mode */
11868 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11869 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11870 return BAD_VALUE;
11871 }
11872 }
11873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11874 return BAD_VALUE;
11875 }
11876 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11877 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11878 return rc;
11879}
11880
11881/*===========================================================================
11882 * FUNCTION : translateToHalMetadata
11883 *
11884 * DESCRIPTION: read settings from the framework camera_metadata_t and
11885 * translate them into HAL parm_type_t entries
11886 * PARAMETERS :
11887 * @request : request sent from framework
11888 * @hal_metadata : HAL metadata buffer to populate
11889 * @snapshotStreamId : stream ID of the snapshot stream
11890 *
11891 * RETURN : success: NO_ERROR
11892 * failure: BAD_VALUE
11893 *==========================================================================*/
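/* Usage sketch (illustrative, not from the original source): the two entry
 * points below differ only in how the framework settings are obtained. A
 * caller holding a raw camera_metadata_t could invoke the second one directly:
 *
 *     metadata_buffer_t halMeta;                 // hypothetical destination buffer
 *     clear_metadata_buffer(&halMeta);
 *     int rc = translateFwkMetadataToHalMetadata(request->settings, &halMeta,
 *             snapshotStreamId, getMinFrameDuration(request));
 *
 * translateToHalMetadata() is the thin wrapper that derives minFrameDuration
 * from the request before delegating. */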
11894int QCamera3HardwareInterface::translateToHalMetadata
11895 (const camera3_capture_request_t *request,
11896 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011897 uint32_t snapshotStreamId) {
11898 if (request == nullptr || hal_metadata == nullptr) {
11899 return BAD_VALUE;
11900 }
11901
11902 int64_t minFrameDuration = getMinFrameDuration(request);
11903
11904 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11905 minFrameDuration);
11906}
11907
11908int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11909 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11910 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11911
Thierry Strudel3d639192016-09-09 11:52:26 -070011912 int rc = 0;
11913 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011914 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011915
11916 /* Do not change the order of the following list unless you know what you are
11917 * doing.
11918 * The order is laid out in such a way that parameters in the front of the table
11919 * may be used to override the parameters later in the table. Examples are:
11920 * 1. META_MODE should precede AEC/AWB/AF MODE
11921 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11922 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11923 * 4. Any mode should precede its corresponding settings
11924 */
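    /* For example (matching rule 1 above), ANDROID_CONTROL_MODE is translated
     * first below and extractSceneMode() is invoked from it, before the
     * individual AE/AWB/AF entries later in this function are parsed. */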
11925 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11926 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11927 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11928 rc = BAD_VALUE;
11929 }
11930 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11931 if (rc != NO_ERROR) {
11932 LOGE("extractSceneMode failed");
11933 }
11934 }
11935
11936 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11937 uint8_t fwk_aeMode =
11938 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11939 uint8_t aeMode;
11940 int32_t redeye;
11941
11942 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11943 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011944 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11945 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011946 } else {
11947 aeMode = CAM_AE_MODE_ON;
11948 }
11949 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11950 redeye = 1;
11951 } else {
11952 redeye = 0;
11953 }
11954
11955 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11956 fwk_aeMode);
11957 if (NAME_NOT_FOUND != val) {
11958 int32_t flashMode = (int32_t)val;
11959 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11960 }
11961
11962 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11964 rc = BAD_VALUE;
11965 }
11966 }
11967
11968 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11969 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11970 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11971 fwk_whiteLevel);
11972 if (NAME_NOT_FOUND != val) {
11973 uint8_t whiteLevel = (uint8_t)val;
11974 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11975 rc = BAD_VALUE;
11976 }
11977 }
11978 }
11979
11980 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11981 uint8_t fwk_cacMode =
11982 frame_settings.find(
11983 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11984 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11985 fwk_cacMode);
11986 if (NAME_NOT_FOUND != val) {
11987 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11988 bool entryAvailable = FALSE;
11989 // Check whether Frameworks set CAC mode is supported in device or not
11990 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11991 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11992 entryAvailable = TRUE;
11993 break;
11994 }
11995 }
11996 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11997 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
11998 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11999 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12000 if (entryAvailable == FALSE) {
12001 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12002 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12003 } else {
12004 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12005 // High is not supported, so set FAST since the spec says the underlying
12006 // device implementation can be the same for both modes.
12007 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12008 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12009 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12010 // in order to avoid the fps drop due to high quality
12011 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12012 } else {
12013 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12014 }
12015 }
12016 }
12017 LOGD("Final cacMode is %d", cacMode);
12018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12019 rc = BAD_VALUE;
12020 }
12021 } else {
12022 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12023 }
12024 }
12025
Thierry Strudel2896d122017-02-23 19:18:03 -080012026 char af_value[PROPERTY_VALUE_MAX];
12027 property_get("persist.camera.af.infinity", af_value, "0");
12028
Jason Lee84ae9972017-02-24 13:24:24 -080012029 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080012030 if (atoi(af_value) == 0) {
12031 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012032 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012033 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12034 fwk_focusMode);
12035 if (NAME_NOT_FOUND != val) {
12036 uint8_t focusMode = (uint8_t)val;
12037 LOGD("set focus mode %d", focusMode);
12038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12039 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12040 rc = BAD_VALUE;
12041 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012042 }
12043 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012044 } else {
12045 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12046 LOGE("Focus forced to infinity %d", focusMode);
12047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12048 rc = BAD_VALUE;
12049 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012050 }
12051
Jason Lee84ae9972017-02-24 13:24:24 -080012052 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12053 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012054 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12055 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12056 focalDistance)) {
12057 rc = BAD_VALUE;
12058 }
12059 }
12060
12061 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12062 uint8_t fwk_antibandingMode =
12063 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12064 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12065 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12066 if (NAME_NOT_FOUND != val) {
12067 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012068 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12069 if (m60HzZone) {
12070 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12071 } else {
12072 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12073 }
12074 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012075 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12076 hal_antibandingMode)) {
12077 rc = BAD_VALUE;
12078 }
12079 }
12080 }
12081
12082 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12083 int32_t expCompensation = frame_settings.find(
12084 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12085 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12086 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12087 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12088 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012089 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012090 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12091 expCompensation)) {
12092 rc = BAD_VALUE;
12093 }
12094 }
12095
12096 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12097 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12099 rc = BAD_VALUE;
12100 }
12101 }
12102 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12103 rc = setHalFpsRange(frame_settings, hal_metadata);
12104 if (rc != NO_ERROR) {
12105 LOGE("setHalFpsRange failed");
12106 }
12107 }
12108
12109 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12110 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12112 rc = BAD_VALUE;
12113 }
12114 }
12115
12116 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12117 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12118 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12119 fwk_effectMode);
12120 if (NAME_NOT_FOUND != val) {
12121 uint8_t effectMode = (uint8_t)val;
12122 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12123 rc = BAD_VALUE;
12124 }
12125 }
12126 }
12127
12128 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12129 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12131 colorCorrectMode)) {
12132 rc = BAD_VALUE;
12133 }
12134 }
12135
12136 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12137 cam_color_correct_gains_t colorCorrectGains;
12138 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12139 colorCorrectGains.gains[i] =
12140 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12141 }
12142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12143 colorCorrectGains)) {
12144 rc = BAD_VALUE;
12145 }
12146 }
12147
12148 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12149 cam_color_correct_matrix_t colorCorrectTransform;
12150 cam_rational_type_t transform_elem;
12151 size_t num = 0;
12152 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12153 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12154 transform_elem.numerator =
12155 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12156 transform_elem.denominator =
12157 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12158 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12159 num++;
12160 }
12161 }
12162 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12163 colorCorrectTransform)) {
12164 rc = BAD_VALUE;
12165 }
12166 }
12167
12168 cam_trigger_t aecTrigger;
12169 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12170 aecTrigger.trigger_id = -1;
12171 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12172 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12173 aecTrigger.trigger =
12174 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12175 aecTrigger.trigger_id =
12176 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12177 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12178 aecTrigger)) {
12179 rc = BAD_VALUE;
12180 }
12181 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12182 aecTrigger.trigger, aecTrigger.trigger_id);
12183 }
12184
12185 /*af_trigger must come with a trigger id*/
12186 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12187 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12188 cam_trigger_t af_trigger;
12189 af_trigger.trigger =
12190 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12191 af_trigger.trigger_id =
12192 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12194 rc = BAD_VALUE;
12195 }
12196 LOGD("AfTrigger: %d AfTriggerID: %d",
12197 af_trigger.trigger, af_trigger.trigger_id);
12198 }
12199
12200 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12201 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12203 rc = BAD_VALUE;
12204 }
12205 }
12206 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12207 cam_edge_application_t edge_application;
12208 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012209
Thierry Strudel3d639192016-09-09 11:52:26 -070012210 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12211 edge_application.sharpness = 0;
12212 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012213 edge_application.sharpness =
12214 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12215 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12216 int32_t sharpness =
12217 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12218 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12219 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12220 LOGD("Setting edge mode sharpness %d", sharpness);
12221 edge_application.sharpness = sharpness;
12222 }
12223 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012224 }
12225 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229
12230 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12231 int32_t respectFlashMode = 1;
12232 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12233 uint8_t fwk_aeMode =
12234 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012235 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12236 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12237 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012238 respectFlashMode = 0;
12239 LOGH("AE Mode controls flash, ignore android.flash.mode");
12240 }
12241 }
12242 if (respectFlashMode) {
12243 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12244 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12245 LOGH("flash mode after mapping %d", val);
12246 // To check: CAM_INTF_META_FLASH_MODE usage
12247 if (NAME_NOT_FOUND != val) {
12248 uint8_t flashMode = (uint8_t)val;
12249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12250 rc = BAD_VALUE;
12251 }
12252 }
12253 }
12254 }
12255
12256 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12257 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12258 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12259 rc = BAD_VALUE;
12260 }
12261 }
12262
12263 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12264 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12266 flashFiringTime)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
12270
12271 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12272 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12274 hotPixelMode)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278
12279 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12280 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12282 lensAperture)) {
12283 rc = BAD_VALUE;
12284 }
12285 }
12286
12287 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12288 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12289 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12290 filterDensity)) {
12291 rc = BAD_VALUE;
12292 }
12293 }
12294
12295 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12296 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12298 focalLength)) {
12299 rc = BAD_VALUE;
12300 }
12301 }
12302
12303 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12304 uint8_t optStabMode =
12305 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12307 optStabMode)) {
12308 rc = BAD_VALUE;
12309 }
12310 }
12311
12312 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12313 uint8_t videoStabMode =
12314 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12315 LOGD("videoStabMode from APP = %d", videoStabMode);
12316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12317 videoStabMode)) {
12318 rc = BAD_VALUE;
12319 }
12320 }
12321
12322
12323 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12324 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12326 noiseRedMode)) {
12327 rc = BAD_VALUE;
12328 }
12329 }
12330
12331 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12332 float reprocessEffectiveExposureFactor =
12333 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12335 reprocessEffectiveExposureFactor)) {
12336 rc = BAD_VALUE;
12337 }
12338 }
12339
12340 cam_crop_region_t scalerCropRegion;
12341 bool scalerCropSet = false;
12342 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12343 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12344 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12345 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12346 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12347
12348 // Map coordinate system from active array to sensor output.
12349 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12350 scalerCropRegion.width, scalerCropRegion.height);
12351
12352 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12353 scalerCropRegion)) {
12354 rc = BAD_VALUE;
12355 }
12356 scalerCropSet = true;
12357 }
12358
12359 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12360 int64_t sensorExpTime =
12361 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12362 LOGD("setting sensorExpTime %lld", sensorExpTime);
12363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12364 sensorExpTime)) {
12365 rc = BAD_VALUE;
12366 }
12367 }
12368
12369 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12370 int64_t sensorFrameDuration =
12371 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012372 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12373 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12374 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12375 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12376 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12377 sensorFrameDuration)) {
12378 rc = BAD_VALUE;
12379 }
12380 }
12381
12382 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12383 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12384 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12385 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12386 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12387 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12388 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12390 sensorSensitivity)) {
12391 rc = BAD_VALUE;
12392 }
12393 }
12394
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012395#ifndef USE_HAL_3_3
12396 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12397 int32_t ispSensitivity =
12398 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12399 if (ispSensitivity <
12400 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12401 ispSensitivity =
12402 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12403 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12404 }
12405 if (ispSensitivity >
12406 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12407 ispSensitivity =
12408 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12409 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12410 }
12411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12412 ispSensitivity)) {
12413 rc = BAD_VALUE;
12414 }
12415 }
12416#endif
12417
Thierry Strudel3d639192016-09-09 11:52:26 -070012418 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12419 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12421 rc = BAD_VALUE;
12422 }
12423 }
12424
12425 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12426 uint8_t fwk_facedetectMode =
12427 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12428
12429 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12430 fwk_facedetectMode);
12431
12432 if (NAME_NOT_FOUND != val) {
12433 uint8_t facedetectMode = (uint8_t)val;
12434 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12435 facedetectMode)) {
12436 rc = BAD_VALUE;
12437 }
12438 }
12439 }
12440
Thierry Strudel54dc9782017-02-15 12:12:10 -080012441 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012442 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012443 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012444 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12445 histogramMode)) {
12446 rc = BAD_VALUE;
12447 }
12448 }
12449
12450 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12451 uint8_t sharpnessMapMode =
12452 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12453 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12454 sharpnessMapMode)) {
12455 rc = BAD_VALUE;
12456 }
12457 }
12458
12459 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12460 uint8_t tonemapMode =
12461 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12462 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12463 rc = BAD_VALUE;
12464 }
12465 }
12466 /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12467 /* All tonemap channels will have the same number of points */
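    /* Data layout sketch: each ANDROID_TONEMAP_CURVE_* entry is a flat array of
     * interleaved (Pin, Pout) pairs, hence .count / 2 control points per channel.
     * For example, a hypothetical 2-point identity curve per channel would be
     * {0.0f, 0.0f, 1.0f, 1.0f}, giving tonemap_points_cnt = 2 below. */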
12468 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12469 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12470 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12471 cam_rgb_tonemap_curves tonemapCurves;
12472 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12473 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12474 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12475 tonemapCurves.tonemap_points_cnt,
12476 CAM_MAX_TONEMAP_CURVE_SIZE);
12477 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12478 }
12479
12480 /* ch0 = G*/
12481 size_t point = 0;
12482 cam_tonemap_curve_t tonemapCurveGreen;
12483 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12484 for (size_t j = 0; j < 2; j++) {
12485 tonemapCurveGreen.tonemap_points[i][j] =
12486 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12487 point++;
12488 }
12489 }
12490 tonemapCurves.curves[0] = tonemapCurveGreen;
12491
12492 /* ch 1 = B */
12493 point = 0;
12494 cam_tonemap_curve_t tonemapCurveBlue;
12495 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12496 for (size_t j = 0; j < 2; j++) {
12497 tonemapCurveBlue.tonemap_points[i][j] =
12498 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12499 point++;
12500 }
12501 }
12502 tonemapCurves.curves[1] = tonemapCurveBlue;
12503
12504 /* ch 2 = R */
12505 point = 0;
12506 cam_tonemap_curve_t tonemapCurveRed;
12507 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12508 for (size_t j = 0; j < 2; j++) {
12509 tonemapCurveRed.tonemap_points[i][j] =
12510 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12511 point++;
12512 }
12513 }
12514 tonemapCurves.curves[2] = tonemapCurveRed;
12515
12516 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12517 tonemapCurves)) {
12518 rc = BAD_VALUE;
12519 }
12520 }
12521
12522 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12523 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12524 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12525 captureIntent)) {
12526 rc = BAD_VALUE;
12527 }
12528 }
12529
12530 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12531 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12532 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12533 blackLevelLock)) {
12534 rc = BAD_VALUE;
12535 }
12536 }
12537
12538 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12539 uint8_t lensShadingMapMode =
12540 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12541 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12542 lensShadingMapMode)) {
12543 rc = BAD_VALUE;
12544 }
12545 }
12546
12547 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12548 cam_area_t roi;
12549 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012550 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012551
12552 // Map coordinate system from active array to sensor output.
12553 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12554 roi.rect.height);
12555
12556 if (scalerCropSet) {
12557 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12558 }
12559 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563
12564 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12565 cam_area_t roi;
12566 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012567 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012568
12569 // Map coordinate system from active array to sensor output.
12570 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12571 roi.rect.height);
12572
12573 if (scalerCropSet) {
12574 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12575 }
12576 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12577 rc = BAD_VALUE;
12578 }
12579 }
12580
12581 // CDS for non-HFR non-video mode
12582 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12583 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12584 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12585 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12586 LOGE("Invalid CDS mode %d!", *fwk_cds);
12587 } else {
12588 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12589 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12590 rc = BAD_VALUE;
12591 }
12592 }
12593 }
12594
Thierry Strudel04e026f2016-10-10 11:27:36 -070012595 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012596 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012597 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012598 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12599 }
12600 if (m_bVideoHdrEnabled)
12601 vhdr = CAM_VIDEO_HDR_MODE_ON;
12602
Thierry Strudel54dc9782017-02-15 12:12:10 -080012603 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12604
12605 if(vhdr != curr_hdr_state)
12606 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12607
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012608 rc = setVideoHdrMode(mParameters, vhdr);
12609 if (rc != NO_ERROR) {
12610 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012611 }
12612
12613 //IR
12614 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12615 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12616 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012617 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12618 uint8_t isIRon = 0;
12619
12620 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012621 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12622 LOGE("Invalid IR mode %d!", fwk_ir);
12623 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012624 if(isIRon != curr_ir_state )
12625 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12626
Thierry Strudel04e026f2016-10-10 11:27:36 -070012627 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12628 CAM_INTF_META_IR_MODE, fwk_ir)) {
12629 rc = BAD_VALUE;
12630 }
12631 }
12632 }
12633
Thierry Strudel54dc9782017-02-15 12:12:10 -080012634 //Binning Correction Mode
12635 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12636 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12637 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12638 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12639 || (0 > fwk_binning_correction)) {
12640 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12641 } else {
12642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12643 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12644 rc = BAD_VALUE;
12645 }
12646 }
12647 }
12648
Thierry Strudel269c81a2016-10-12 12:13:59 -070012649 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12650 float aec_speed;
12651 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12652 LOGD("AEC Speed :%f", aec_speed);
12653 if ( aec_speed < 0 ) {
12654 LOGE("Invalid AEC mode %f!", aec_speed);
12655 } else {
12656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12657 aec_speed)) {
12658 rc = BAD_VALUE;
12659 }
12660 }
12661 }
12662
12663 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12664 float awb_speed;
12665 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12666 LOGD("AWB Speed :%f", awb_speed);
12667 if ( awb_speed < 0 ) {
12668 LOGE("Invalid AWB mode %f!", awb_speed);
12669 } else {
12670 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12671 awb_speed)) {
12672 rc = BAD_VALUE;
12673 }
12674 }
12675 }
12676
Thierry Strudel3d639192016-09-09 11:52:26 -070012677 // TNR
12678 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12679 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12680 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012681 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012682 cam_denoise_param_t tnr;
12683 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12684 tnr.process_plates =
12685 (cam_denoise_process_type_t)frame_settings.find(
12686 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12687 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012688
12689 if(b_TnrRequested != curr_tnr_state)
12690 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12691
Thierry Strudel3d639192016-09-09 11:52:26 -070012692 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12693 rc = BAD_VALUE;
12694 }
12695 }
12696
Thierry Strudel54dc9782017-02-15 12:12:10 -080012697 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012698 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012699 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12701 *exposure_metering_mode)) {
12702 rc = BAD_VALUE;
12703 }
12704 }
12705
Thierry Strudel3d639192016-09-09 11:52:26 -070012706 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12707 int32_t fwk_testPatternMode =
12708 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12709 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12710 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12711
12712 if (NAME_NOT_FOUND != testPatternMode) {
12713 cam_test_pattern_data_t testPatternData;
12714 memset(&testPatternData, 0, sizeof(testPatternData));
12715 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12716 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12717 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12718 int32_t *fwk_testPatternData =
12719 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12720 testPatternData.r = fwk_testPatternData[0];
12721 testPatternData.b = fwk_testPatternData[3];
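                // Note (assumption, per the public Android metadata docs):
                // ANDROID_SENSOR_TEST_PATTERN_DATA is ordered [R, Geven, Godd, B];
                // the switch below maps the two green samples onto gr/gb
                // according to the sensor's Bayer color arrangement.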
12722 switch (gCamCapability[mCameraId]->color_arrangement) {
12723 case CAM_FILTER_ARRANGEMENT_RGGB:
12724 case CAM_FILTER_ARRANGEMENT_GRBG:
12725 testPatternData.gr = fwk_testPatternData[1];
12726 testPatternData.gb = fwk_testPatternData[2];
12727 break;
12728 case CAM_FILTER_ARRANGEMENT_GBRG:
12729 case CAM_FILTER_ARRANGEMENT_BGGR:
12730 testPatternData.gr = fwk_testPatternData[2];
12731 testPatternData.gb = fwk_testPatternData[1];
12732 break;
12733 default:
12734 LOGE("color arrangement %d is not supported",
12735 gCamCapability[mCameraId]->color_arrangement);
12736 break;
12737 }
12738 }
12739 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12740 testPatternData)) {
12741 rc = BAD_VALUE;
12742 }
12743 } else {
12744 LOGE("Invalid framework sensor test pattern mode %d",
12745 fwk_testPatternMode);
12746 }
12747 }
12748
12749 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12750 size_t count = 0;
12751 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12752 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12753 gps_coords.data.d, gps_coords.count, count);
12754 if (gps_coords.count != count) {
12755 rc = BAD_VALUE;
12756 }
12757 }
12758
12759 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12760 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12761 size_t count = 0;
12762 const char *gps_methods_src = (const char *)
12763 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12764 memset(gps_methods, '\0', sizeof(gps_methods));
12765 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12766 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12767 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12768 if (GPS_PROCESSING_METHOD_SIZE != count) {
12769 rc = BAD_VALUE;
12770 }
12771 }
12772
12773 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12774 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12776 gps_timestamp)) {
12777 rc = BAD_VALUE;
12778 }
12779 }
12780
12781 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12782 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12783 cam_rotation_info_t rotation_info;
12784 if (orientation == 0) {
12785 rotation_info.rotation = ROTATE_0;
12786 } else if (orientation == 90) {
12787 rotation_info.rotation = ROTATE_90;
12788 } else if (orientation == 180) {
12789 rotation_info.rotation = ROTATE_180;
12790 } else if (orientation == 270) {
12791 rotation_info.rotation = ROTATE_270;
12792 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012793 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012794 rotation_info.streamId = snapshotStreamId;
12795 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12796 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12797 rc = BAD_VALUE;
12798 }
12799 }
12800
12801 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12802 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12803 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12804 rc = BAD_VALUE;
12805 }
12806 }
12807
12808 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12809 uint32_t thumb_quality = (uint32_t)
12810 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12812 thumb_quality)) {
12813 rc = BAD_VALUE;
12814 }
12815 }
12816
12817 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12818 cam_dimension_t dim;
12819 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12820 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12822 rc = BAD_VALUE;
12823 }
12824 }
12825
12826 // Internal metadata
12827 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12828 size_t count = 0;
12829 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12830 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12831 privatedata.data.i32, privatedata.count, count);
12832 if (privatedata.count != count) {
12833 rc = BAD_VALUE;
12834 }
12835 }
12836
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012837 // ISO/Exposure Priority
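    // Illustrative note (hypothetical values, not from the original source): a
    // client choosing ISO priority sets QCAMERA3_SELECT_PRIORITY to the ISO
    // priority mode and passes the desired value (e.g. ISO 800) in the 64-bit
    // QCAMERA3_USE_ISO_EXP_PRIORITY entry; for exposure priority the same entry
    // carries the exposure time instead. The code below then batches it as
    // CAM_INTF_PARM_ISO or CAM_INTF_PARM_EXPOSURE_TIME and enables ZSL mode.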
12838 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12839 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12840 cam_priority_mode_t mode =
12841 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12842 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12843 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12844 use_iso_exp_pty.previewOnly = FALSE;
12845 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12846 use_iso_exp_pty.value = *ptr;
12847
12848 if(CAM_ISO_PRIORITY == mode) {
12849 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12850 use_iso_exp_pty)) {
12851 rc = BAD_VALUE;
12852 }
12853 }
12854 else {
12855 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12856 use_iso_exp_pty)) {
12857 rc = BAD_VALUE;
12858 }
12859 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012860
12861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12862 rc = BAD_VALUE;
12863 }
12864 }
12865 } else {
12866 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12867 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012868 }
12869 }
12870
12871 // Saturation
12872 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12873 int32_t* use_saturation =
12874 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12876 rc = BAD_VALUE;
12877 }
12878 }
12879
Thierry Strudel3d639192016-09-09 11:52:26 -070012880 // EV step
12881 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12882 gCamCapability[mCameraId]->exp_compensation_step)) {
12883 rc = BAD_VALUE;
12884 }
12885
12886 // CDS info
12887 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12888 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12889 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12890
12891 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12892 CAM_INTF_META_CDS_DATA, *cdsData)) {
12893 rc = BAD_VALUE;
12894 }
12895 }
12896
Shuzhen Wang19463d72016-03-08 11:09:52 -080012897 // Hybrid AE
12898 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12899 uint8_t *hybrid_ae = (uint8_t *)
12900 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12901
12902 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12903 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12904 rc = BAD_VALUE;
12905 }
12906 }
12907
Shuzhen Wang14415f52016-11-16 18:26:18 -080012908 // Histogram
12909 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12910 uint8_t histogramMode =
12911 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12912 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12913 histogramMode)) {
12914 rc = BAD_VALUE;
12915 }
12916 }
12917
12918 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12919 int32_t histogramBins =
12920 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12921 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12922 histogramBins)) {
12923 rc = BAD_VALUE;
12924 }
12925 }
12926
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012927 // Tracking AF
12928 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12929 uint8_t trackingAfTrigger =
12930 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12931 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12932 trackingAfTrigger)) {
12933 rc = BAD_VALUE;
12934 }
12935 }
12936
Thierry Strudel3d639192016-09-09 11:52:26 -070012937 return rc;
12938}
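/*
 * Illustrative sketch (not part of the build): how the JPEG translation above
 * consumes a framework request. The metadata values below are hypothetical; only
 * the mapping of ANDROID_JPEG_ORIENTATION (0/90/180/270) to ROTATE_* and the
 * extra CAM_INTF_PARM_ROTATION entry mirror the code above.
 *
 *   CameraMetadata settings;
 *   int32_t orientation = 90;
 *   settings.update(ANDROID_JPEG_ORIENTATION, &orientation, 1);
 *   // After translation, hal_metadata carries both:
 *   //   CAM_INTF_META_JPEG_ORIENTATION = 90
 *   //   CAM_INTF_PARM_ROTATION with rotation = ROTATE_90,
 *   //   device_rotation = ROTATE_0, streamId = snapshotStreamId
 */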
12939
12940/*===========================================================================
12941 * FUNCTION : captureResultCb
12942 *
12943 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12944 *
12945 * PARAMETERS :
12946 * @frame : frame information from mm-camera-interface
12947 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12948 * @userdata: userdata
12949 *
12950 * RETURN : NONE
12951 *==========================================================================*/
12952void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12953 camera3_stream_buffer_t *buffer,
12954 uint32_t frame_number, bool isInputBuffer, void *userdata)
12955{
12956 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12957 if (hw == NULL) {
12958 LOGE("Invalid hw %p", hw);
12959 return;
12960 }
12961
12962 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12963 return;
12964}
12965
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012966/*===========================================================================
12967 * FUNCTION : setBufferErrorStatus
12968 *
12969 * DESCRIPTION: Callback handler for channels to report any buffer errors
12970 *
12971 * PARAMETERS :
12972 * @ch : Channel on which buffer error is reported from
12973 * @frame_number : frame number on which buffer error is reported on
12974 * @buffer_status : buffer error status
12975 * @userdata: userdata
12976 *
12977 * RETURN : NONE
12978 *==========================================================================*/
12979void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12980 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12981{
12982 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12983 if (hw == NULL) {
12984 LOGE("Invalid hw %p", hw);
12985 return;
12986 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012987
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012988 hw->setBufferErrorStatus(ch, frame_number, err);
12989 return;
12990}
12991
12992void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12993 uint32_t frameNumber, camera3_buffer_status_t err)
12994{
12995 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12996 pthread_mutex_lock(&mMutex);
12997
12998 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12999 if (req.frame_number != frameNumber)
13000 continue;
13001 for (auto& k : req.mPendingBufferList) {
13002 if(k.stream->priv == ch) {
13003 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13004 }
13005 }
13006 }
13007
13008 pthread_mutex_unlock(&mMutex);
13009 return;
13010}
Thierry Strudel3d639192016-09-09 11:52:26 -070013011/*===========================================================================
13012 * FUNCTION : initialize
13013 *
13014 * DESCRIPTION: Pass framework callback pointers to HAL
13015 *
13016 * PARAMETERS :
13017 *
13018 *
13019 * RETURN : Success : 0
13020 * Failure: -ENODEV
13021 *==========================================================================*/
13022
13023int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13024 const camera3_callback_ops_t *callback_ops)
13025{
13026 LOGD("E");
13027 QCamera3HardwareInterface *hw =
13028 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13029 if (!hw) {
13030 LOGE("NULL camera device");
13031 return -ENODEV;
13032 }
13033
13034 int rc = hw->initialize(callback_ops);
13035 LOGD("X");
13036 return rc;
13037}
13038
13039/*===========================================================================
13040 * FUNCTION : configure_streams
13041 *
13042 * DESCRIPTION: Configure the streams requested by the framework for this camera device
13043 *
13044 * PARAMETERS :
13045 *
13046 *
13047 * RETURN : Success: 0
13048 * Failure: -EINVAL (if stream configuration is invalid)
13049 * -ENODEV (fatal error)
13050 *==========================================================================*/
13051
13052int QCamera3HardwareInterface::configure_streams(
13053 const struct camera3_device *device,
13054 camera3_stream_configuration_t *stream_list)
13055{
13056 LOGD("E");
13057 QCamera3HardwareInterface *hw =
13058 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13059 if (!hw) {
13060 LOGE("NULL camera device");
13061 return -ENODEV;
13062 }
13063 int rc = hw->configureStreams(stream_list);
13064 LOGD("X");
13065 return rc;
13066}
13067
13068/*===========================================================================
13069 * FUNCTION : construct_default_request_settings
13070 *
13071 * DESCRIPTION: Configure a settings buffer to meet the required use case
13072 *
13073 * PARAMETERS :
13074 *
13075 *
13076 * RETURN : Success: Return valid metadata
13077 * Failure: Return NULL
13078 *==========================================================================*/
13079const camera_metadata_t* QCamera3HardwareInterface::
13080 construct_default_request_settings(const struct camera3_device *device,
13081 int type)
13082{
13083
13084 LOGD("E");
13085 camera_metadata_t* fwk_metadata = NULL;
13086 QCamera3HardwareInterface *hw =
13087 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13088 if (!hw) {
13089 LOGE("NULL camera device");
13090 return NULL;
13091 }
13092
13093 fwk_metadata = hw->translateCapabilityToMetadata(type);
13094
13095 LOGD("X");
13096 return fwk_metadata;
13097}
13098
13099/*===========================================================================
13100 * FUNCTION : process_capture_request
13101 *
13102 * DESCRIPTION: Queue a capture request from the framework for processing
13103 *
13104 * PARAMETERS :
13105 *
13106 *
13107 * RETURN :
13108 *==========================================================================*/
13109int QCamera3HardwareInterface::process_capture_request(
13110 const struct camera3_device *device,
13111 camera3_capture_request_t *request)
13112{
13113 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013114 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013115 QCamera3HardwareInterface *hw =
13116 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13117 if (!hw) {
13118 LOGE("NULL camera device");
13119 return -EINVAL;
13120 }
13121
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013122 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013123 LOGD("X");
13124 return rc;
13125}
13126
13127/*===========================================================================
13128 * FUNCTION : dump
13129 *
13130 * DESCRIPTION: Dump HAL state and debug information to the given file descriptor
13131 *
13132 * PARAMETERS :
13133 *
13134 *
13135 * RETURN :
13136 *==========================================================================*/
13137
13138void QCamera3HardwareInterface::dump(
13139 const struct camera3_device *device, int fd)
13140{
13141 /* Log level property is read when "adb shell dumpsys media.camera" is
13142 called so that the log level can be controlled without restarting
13143 the media server */
13144 getLogLevel();
13145
13146 LOGD("E");
13147 QCamera3HardwareInterface *hw =
13148 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13149 if (!hw) {
13150 LOGE("NULL camera device");
13151 return;
13152 }
13153
13154 hw->dump(fd);
13155 LOGD("X");
13156 return;
13157}
13158
13159/*===========================================================================
13160 * FUNCTION : flush
13161 *
13162 * DESCRIPTION: Flush all in-flight captures for this camera device
13163 *
13164 * PARAMETERS :
13165 *
13166 *
13167 * RETURN :
13168 *==========================================================================*/
13169
13170int QCamera3HardwareInterface::flush(
13171 const struct camera3_device *device)
13172{
13173 int rc;
13174 LOGD("E");
13175 QCamera3HardwareInterface *hw =
13176 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13177 if (!hw) {
13178 LOGE("NULL camera device");
13179 return -EINVAL;
13180 }
13181
13182 pthread_mutex_lock(&hw->mMutex);
13183 // Validate current state
13184 switch (hw->mState) {
13185 case STARTED:
13186 /* valid state */
13187 break;
13188
13189 case ERROR:
13190 pthread_mutex_unlock(&hw->mMutex);
13191 hw->handleCameraDeviceError();
13192 return -ENODEV;
13193
13194 default:
13195 LOGI("Flush returned during state %d", hw->mState);
13196 pthread_mutex_unlock(&hw->mMutex);
13197 return 0;
13198 }
13199 pthread_mutex_unlock(&hw->mMutex);
13200
13201 rc = hw->flush(true /* restart channels */ );
13202 LOGD("X");
13203 return rc;
13204}
13205
13206/*===========================================================================
13207 * FUNCTION : close_camera_device
13208 *
13209 * DESCRIPTION: Close the camera device and release the HAL instance
13210 *
13211 * PARAMETERS :
13212 *
13213 *
13214 * RETURN :
13215 *==========================================================================*/
13216int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13217{
13218 int ret = NO_ERROR;
13219 QCamera3HardwareInterface *hw =
13220 reinterpret_cast<QCamera3HardwareInterface *>(
13221 reinterpret_cast<camera3_device_t *>(device)->priv);
13222 if (!hw) {
13223 LOGE("NULL camera device");
13224 return BAD_VALUE;
13225 }
13226
13227 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13228 delete hw;
13229 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013230 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013231 return ret;
13232}
13233
13234/*===========================================================================
13235 * FUNCTION : getWaveletDenoiseProcessPlate
13236 *
13237 * DESCRIPTION: query wavelet denoise process plate
13238 *
13239 * PARAMETERS : None
13240 *
13241 * RETURN     : WNR process plate value
13242 *==========================================================================*/
13243cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13244{
13245 char prop[PROPERTY_VALUE_MAX];
13246 memset(prop, 0, sizeof(prop));
13247 property_get("persist.denoise.process.plates", prop, "0");
13248 int processPlate = atoi(prop);
13249 switch(processPlate) {
13250 case 0:
13251 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13252 case 1:
13253 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13254 case 2:
13255 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13256 case 3:
13257 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13258 default:
13259 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13260 }
13261}
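/*
 * Usage sketch (assumption, not part of the build): the WNR plate is chosen via
 * a property before the camera starts; values outside 0-3 fall back to the
 * streamlined YCbCr plate, as the switch above shows.
 *
 *   adb shell setprop persist.denoise.process.plates 2
 *   // getWaveletDenoiseProcessPlate() -> CAM_WAVELET_DENOISE_STREAMLINE_YCBCR
 */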
13262
13263
13264/*===========================================================================
13265 * FUNCTION : getTemporalDenoiseProcessPlate
13266 *
13267 * DESCRIPTION: query temporal denoise process plate
13268 *
13269 * PARAMETERS : None
13270 *
13271 * RETURN     : TNR process plate value
13272 *==========================================================================*/
13273cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13274{
13275 char prop[PROPERTY_VALUE_MAX];
13276 memset(prop, 0, sizeof(prop));
13277 property_get("persist.tnr.process.plates", prop, "0");
13278 int processPlate = atoi(prop);
13279 switch(processPlate) {
13280 case 0:
13281 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13282 case 1:
13283 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13284 case 2:
13285 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13286 case 3:
13287 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13288 default:
13289 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13290 }
13291}
13292
13293
13294/*===========================================================================
13295 * FUNCTION : extractSceneMode
13296 *
13297 * DESCRIPTION: Extract scene mode from frameworks set metadata
13298 *
13299 * PARAMETERS :
13300 * @frame_settings: CameraMetadata reference
13301 *      @metaMode: ANDROID_CONTROL_MODE
13302 * @hal_metadata: hal metadata structure
13303 *
13304 * RETURN     : int32_t status, NO_ERROR on success
13305 *==========================================================================*/
13306int32_t QCamera3HardwareInterface::extractSceneMode(
13307 const CameraMetadata &frame_settings, uint8_t metaMode,
13308 metadata_buffer_t *hal_metadata)
13309{
13310 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013311 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13312
13313 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13314 LOGD("Ignoring control mode OFF_KEEP_STATE");
13315 return NO_ERROR;
13316 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013317
13318 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13319 camera_metadata_ro_entry entry =
13320 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13321 if (0 == entry.count)
13322 return rc;
13323
13324 uint8_t fwk_sceneMode = entry.data.u8[0];
13325
13326 int val = lookupHalName(SCENE_MODES_MAP,
13327 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13328 fwk_sceneMode);
13329 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013330 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013331 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013332 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013333 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013334
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013335 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13336 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13337 }
13338
13339 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13340 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013341 cam_hdr_param_t hdr_params;
13342 hdr_params.hdr_enable = 1;
13343 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13344 hdr_params.hdr_need_1x = false;
13345 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13346 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13347 rc = BAD_VALUE;
13348 }
13349 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013350
Thierry Strudel3d639192016-09-09 11:52:26 -070013351 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13352 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13353 rc = BAD_VALUE;
13354 }
13355 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013356
13357 if (mForceHdrSnapshot) {
13358 cam_hdr_param_t hdr_params;
13359 hdr_params.hdr_enable = 1;
13360 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13361 hdr_params.hdr_need_1x = false;
13362 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13363 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13364 rc = BAD_VALUE;
13365 }
13366 }
13367
Thierry Strudel3d639192016-09-09 11:52:26 -070013368 return rc;
13369}
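/*
 * Illustrative sketch (hypothetical request, not part of the build): a framework
 * request that selects the HDR scene mode takes the bracketing path above.
 *
 *   CameraMetadata settings;
 *   uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *   // extractSceneMode() maps the scene mode through SCENE_MODES_MAP and, when
 *   // sensor HDR is not in use, programs CAM_INTF_PARM_HAL_BRACKETING_HDR and
 *   // CAM_INTF_PARM_BESTSHOT_MODE into hal_metadata.
 */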
13370
13371/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013372 * FUNCTION : setVideoHdrMode
13373 *
13374 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13375 *
13376 * PARAMETERS :
13377 * @hal_metadata: hal metadata structure
13378 *      @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13379 *
13380 * RETURN     : int32_t status, BAD_VALUE if the mode is invalid
13381 *==========================================================================*/
13382int32_t QCamera3HardwareInterface::setVideoHdrMode(
13383 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13384{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013385 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13386 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13387 }
13388
13389 LOGE("Invalid Video HDR mode %d!", vhdr);
13390 return BAD_VALUE;
13391}
13392
13393/*===========================================================================
13394 * FUNCTION : setSensorHDR
13395 *
13396 * DESCRIPTION: Enable/disable sensor HDR.
13397 *
13398 * PARAMETERS :
13399 * @hal_metadata: hal metadata structure
13400 * @enable: boolean whether to enable/disable sensor HDR
13401 *
13402 * RETURN     : int32_t status, NO_ERROR on success
13403 *==========================================================================*/
13404int32_t QCamera3HardwareInterface::setSensorHDR(
13405 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13406{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013407 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013408 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13409
13410 if (enable) {
13411 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13412 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13413 #ifdef _LE_CAMERA_
13414 //Default to staggered HDR for IOT
13415 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13416 #else
13417 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13418 #endif
13419 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13420 }
13421
13422 bool isSupported = false;
13423 switch (sensor_hdr) {
13424 case CAM_SENSOR_HDR_IN_SENSOR:
13425 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13426 CAM_QCOM_FEATURE_SENSOR_HDR) {
13427 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013428 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013429 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013430 break;
13431 case CAM_SENSOR_HDR_ZIGZAG:
13432 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13433 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13434 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013435 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013436 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013437 break;
13438 case CAM_SENSOR_HDR_STAGGERED:
13439 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13440 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13441 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013442 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013443 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013444 break;
13445 case CAM_SENSOR_HDR_OFF:
13446 isSupported = true;
13447 LOGD("Turning off sensor HDR");
13448 break;
13449 default:
13450 LOGE("HDR mode %d not supported", sensor_hdr);
13451 rc = BAD_VALUE;
13452 break;
13453 }
13454
13455 if(isSupported) {
13456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13457 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13458 rc = BAD_VALUE;
13459 } else {
13460 if(!isVideoHdrEnable)
13461 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013462 }
13463 }
13464 return rc;
13465}
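/*
 * Usage sketch (assumption): when video HDR or the HDR scene mode requests
 * sensor HDR, the concrete HDR type comes from a property and is validated
 * against the capability mask above (3 maps to staggered HDR per the IOT
 * default in setSensorHDR()).
 *
 *   adb shell setprop persist.camera.sensor.hdr 3
 *   // setSensorHDR(hal_metadata, true) then sets CAM_INTF_PARM_SENSOR_HDR to
 *   // CAM_SENSOR_HDR_STAGGERED only if CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR
 *   // is advertised by the sensor capabilities.
 */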
13466
13467/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013468 * FUNCTION : needRotationReprocess
13469 *
13470 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13471 *
13472 * PARAMETERS : none
13473 *
13474 * RETURN : true: needed
13475 * false: no need
13476 *==========================================================================*/
13477bool QCamera3HardwareInterface::needRotationReprocess()
13478{
13479 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13480        // pp has the capability to process rotation, so any requested rotation is handled via reprocess
13481 LOGH("need do reprocess for rotation");
13482 return true;
13483 }
13484
13485 return false;
13486}
13487
13488/*===========================================================================
13489 * FUNCTION : needReprocess
13490 *
13491 * DESCRIPTION: if reprocess is needed
13492 *
13493 * PARAMETERS : none
13494 *
13495 * RETURN : true: needed
13496 * false: no need
13497 *==========================================================================*/
13498bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13499{
13500 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13501 // TODO: add for ZSL HDR later
13502 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13503 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13504 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13505 return true;
13506 } else {
13507 LOGH("already post processed frame");
13508 return false;
13509 }
13510 }
13511 return needRotationReprocess();
13512}
13513
13514/*===========================================================================
13515 * FUNCTION : needJpegExifRotation
13516 *
13517 * DESCRIPTION: if rotation needs to be handled via JPEG EXIF
13518 *
13519 * PARAMETERS : none
13520 *
13521 * RETURN : true: needed
13522 * false: no need
13523 *==========================================================================*/
13524bool QCamera3HardwareInterface::needJpegExifRotation()
13525{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013526 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013527 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13528 LOGD("Need use Jpeg EXIF Rotation");
13529 return true;
13530 }
13531 return false;
13532}
13533
13534/*===========================================================================
13535 * FUNCTION : addOfflineReprocChannel
13536 *
13537 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13538 * coming from input channel
13539 *
13540 * PARAMETERS :
13541 * @config : reprocess configuration
13542 * @inputChHandle : pointer to the input (source) channel
13543 *
13544 *
13545 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13546 *==========================================================================*/
13547QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13548 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13549{
13550 int32_t rc = NO_ERROR;
13551 QCamera3ReprocessChannel *pChannel = NULL;
13552
13553 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013554 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13555 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013556 if (NULL == pChannel) {
13557 LOGE("no mem for reprocess channel");
13558 return NULL;
13559 }
13560
13561 rc = pChannel->initialize(IS_TYPE_NONE);
13562 if (rc != NO_ERROR) {
13563 LOGE("init reprocess channel failed, ret = %d", rc);
13564 delete pChannel;
13565 return NULL;
13566 }
13567
13568 // pp feature config
13569 cam_pp_feature_config_t pp_config;
13570 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13571
13572 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13573 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13574 & CAM_QCOM_FEATURE_DSDN) {
13575        //Use CPP CDS in case h/w supports it.
13576 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13577 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13578 }
13579 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13580 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13581 }
13582
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013583 if (config.hdr_param.hdr_enable) {
13584 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13585 pp_config.hdr_param = config.hdr_param;
13586 }
13587
13588 if (mForceHdrSnapshot) {
13589 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13590 pp_config.hdr_param.hdr_enable = 1;
13591 pp_config.hdr_param.hdr_need_1x = 0;
13592 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13593 }
13594
Thierry Strudel3d639192016-09-09 11:52:26 -070013595 rc = pChannel->addReprocStreamsFromSource(pp_config,
13596 config,
13597 IS_TYPE_NONE,
13598 mMetadataChannel);
13599
13600 if (rc != NO_ERROR) {
13601 delete pChannel;
13602 return NULL;
13603 }
13604 return pChannel;
13605}
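/*
 * Illustrative call sequence (hypothetical caller, not part of the build): an
 * offline reprocess channel is created from a source processing channel plus a
 * reprocess configuration filled in by the capture path.
 *
 *   reprocess_config_t cfg = {};                  // populated by the caller
 *   QCamera3ReprocessChannel *reproc =
 *           addOfflineReprocChannel(cfg, inputChannel);
 *   if (reproc == NULL) {
 *       // channel allocation or initialize() failed; abort the reprocess
 *   }
 */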
13606
13607/*===========================================================================
13608 * FUNCTION : getMobicatMask
13609 *
13610 * DESCRIPTION: returns mobicat mask
13611 *
13612 * PARAMETERS : none
13613 *
13614 * RETURN : mobicat mask
13615 *
13616 *==========================================================================*/
13617uint8_t QCamera3HardwareInterface::getMobicatMask()
13618{
13619 return m_MobicatMask;
13620}
13621
13622/*===========================================================================
13623 * FUNCTION : setMobicat
13624 *
13625 * DESCRIPTION: set Mobicat on/off.
13626 *
13627 * PARAMETERS :
13628 * @params : none
13629 *
13630 * RETURN : int32_t type of status
13631 * NO_ERROR -- success
13632 * none-zero failure code
13633 *==========================================================================*/
13634int32_t QCamera3HardwareInterface::setMobicat()
13635{
13636 char value [PROPERTY_VALUE_MAX];
13637 property_get("persist.camera.mobicat", value, "0");
13638 int32_t ret = NO_ERROR;
13639 uint8_t enableMobi = (uint8_t)atoi(value);
13640
13641 if (enableMobi) {
13642 tune_cmd_t tune_cmd;
13643 tune_cmd.type = SET_RELOAD_CHROMATIX;
13644 tune_cmd.module = MODULE_ALL;
13645 tune_cmd.value = TRUE;
13646 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13647 CAM_INTF_PARM_SET_VFE_COMMAND,
13648 tune_cmd);
13649
13650 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13651 CAM_INTF_PARM_SET_PP_COMMAND,
13652 tune_cmd);
13653 }
13654 m_MobicatMask = enableMobi;
13655
13656 return ret;
13657}
13658
13659/*===========================================================================
13660* FUNCTION : getLogLevel
13661*
13662* DESCRIPTION: Reads the log level property into a variable
13663*
13664* PARAMETERS :
13665* None
13666*
13667* RETURN :
13668* None
13669*==========================================================================*/
13670void QCamera3HardwareInterface::getLogLevel()
13671{
13672 char prop[PROPERTY_VALUE_MAX];
13673 uint32_t globalLogLevel = 0;
13674
13675 property_get("persist.camera.hal.debug", prop, "0");
13676 int val = atoi(prop);
13677 if (0 <= val) {
13678 gCamHal3LogLevel = (uint32_t)val;
13679 }
13680
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013681 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013682 gKpiDebugLevel = atoi(prop);
13683
13684 property_get("persist.camera.global.debug", prop, "0");
13685 val = atoi(prop);
13686 if (0 <= val) {
13687 globalLogLevel = (uint32_t)val;
13688 }
13689
13690 /* Highest log level among hal.logs and global.logs is selected */
13691 if (gCamHal3LogLevel < globalLogLevel)
13692 gCamHal3LogLevel = globalLogLevel;
13693
13694 return;
13695}
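/*
 * Usage sketch (assumption): the effective HAL log level is the maximum of the
 * per-HAL and global properties, so either one can raise verbosity.
 *
 *   adb shell setprop persist.camera.hal.debug 2
 *   adb shell setprop persist.camera.global.debug 4
 *   adb shell dumpsys media.camera      // triggers dump() -> getLogLevel()
 *   // gCamHal3LogLevel becomes 4, the higher of the two values
 */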
13696
13697/*===========================================================================
13698 * FUNCTION : validateStreamRotations
13699 *
13700 * DESCRIPTION: Check if the rotations requested are supported
13701 *
13702 * PARAMETERS :
13703 * @stream_list : streams to be configured
13704 *
13705 * RETURN : NO_ERROR on success
13706 * -EINVAL on failure
13707 *
13708 *==========================================================================*/
13709int QCamera3HardwareInterface::validateStreamRotations(
13710 camera3_stream_configuration_t *streamList)
13711{
13712 int rc = NO_ERROR;
13713
13714 /*
13715 * Loop through all streams requested in configuration
13716 * Check if unsupported rotations have been requested on any of them
13717 */
13718 for (size_t j = 0; j < streamList->num_streams; j++){
13719 camera3_stream_t *newStream = streamList->streams[j];
13720
Emilian Peev35ceeed2017-06-29 11:58:56 -070013721 switch(newStream->rotation) {
13722 case CAMERA3_STREAM_ROTATION_0:
13723 case CAMERA3_STREAM_ROTATION_90:
13724 case CAMERA3_STREAM_ROTATION_180:
13725 case CAMERA3_STREAM_ROTATION_270:
13726 //Expected values
13727 break;
13728 default:
13729 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13730 "type:%d and stream format:%d", __func__,
13731 newStream->rotation, newStream->stream_type,
13732 newStream->format);
13733 return -EINVAL;
13734 }
13735
Thierry Strudel3d639192016-09-09 11:52:26 -070013736 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13737 bool isImplDef = (newStream->format ==
13738 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13739 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13740 isImplDef);
13741
13742 if (isRotated && (!isImplDef || isZsl)) {
13743 LOGE("Error: Unsupported rotation of %d requested for stream"
13744 "type:%d and stream format:%d",
13745 newStream->rotation, newStream->stream_type,
13746 newStream->format);
13747 rc = -EINVAL;
13748 break;
13749 }
13750 }
13751
13752 return rc;
13753}
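/*
 * Illustrative sketch (hypothetical stream, not part of the build): only
 * implementation-defined, non-ZSL output streams may request a non-zero
 * rotation; everything else is rejected with -EINVAL.
 *
 *   camera3_stream_t s = {};
 *   s.stream_type = CAMERA3_STREAM_OUTPUT;
 *   s.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   s.rotation    = CAMERA3_STREAM_ROTATION_90;   // accepted
 *   s.format      = HAL_PIXEL_FORMAT_YCbCr_420_888;
 *   // the same rotation on this format would make validateStreamRotations()
 *   // return -EINVAL
 */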
13754
13755/*===========================================================================
13756* FUNCTION : getFlashInfo
13757*
13758* DESCRIPTION: Retrieve information about whether the device has a flash.
13759*
13760* PARAMETERS :
13761* @cameraId : Camera id to query
13762* @hasFlash : Boolean indicating whether there is a flash device
13763* associated with given camera
13764* @flashNode : If a flash device exists, this will be its device node.
13765*
13766* RETURN :
13767* None
13768*==========================================================================*/
13769void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13770 bool& hasFlash,
13771 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13772{
13773 cam_capability_t* camCapability = gCamCapability[cameraId];
13774 if (NULL == camCapability) {
13775 hasFlash = false;
13776 flashNode[0] = '\0';
13777 } else {
13778 hasFlash = camCapability->flash_available;
13779 strlcpy(flashNode,
13780 (char*)camCapability->flash_dev_name,
13781 QCAMERA_MAX_FILEPATH_LENGTH);
13782 }
13783}
13784
13785/*===========================================================================
13786* FUNCTION : getEepromVersionInfo
13787*
13788* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13789*
13790* PARAMETERS : None
13791*
13792* RETURN : string describing EEPROM version
13793* "\0" if no such info available
13794*==========================================================================*/
13795const char *QCamera3HardwareInterface::getEepromVersionInfo()
13796{
13797 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13798}
13799
13800/*===========================================================================
13801* FUNCTION : getLdafCalib
13802*
13803* DESCRIPTION: Retrieve Laser AF calibration data
13804*
13805* PARAMETERS : None
13806*
13807* RETURN : Two uint32_t describing laser AF calibration data
13808* NULL if none is available.
13809*==========================================================================*/
13810const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13811{
13812 if (mLdafCalibExist) {
13813 return &mLdafCalib[0];
13814 } else {
13815 return NULL;
13816 }
13817}
13818
13819/*===========================================================================
13820 * FUNCTION : dynamicUpdateMetaStreamInfo
13821 *
13822 * DESCRIPTION: This function:
13823 * (1) stops all the channels
13824 * (2) returns error on pending requests and buffers
13825 * (3) sends metastream_info in setparams
13826 * (4) starts all channels
13827 * This is useful when sensor has to be restarted to apply any
13828 * settings such as frame rate from a different sensor mode
13829 *
13830 * PARAMETERS : None
13831 *
13832 * RETURN : NO_ERROR on success
13833 * Error codes on failure
13834 *
13835 *==========================================================================*/
13836int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13837{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013838 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013839 int rc = NO_ERROR;
13840
13841 LOGD("E");
13842
13843 rc = stopAllChannels();
13844 if (rc < 0) {
13845 LOGE("stopAllChannels failed");
13846 return rc;
13847 }
13848
13849 rc = notifyErrorForPendingRequests();
13850 if (rc < 0) {
13851 LOGE("notifyErrorForPendingRequests failed");
13852 return rc;
13853 }
13854
13855 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13856 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13857 "Format:%d",
13858 mStreamConfigInfo.type[i],
13859 mStreamConfigInfo.stream_sizes[i].width,
13860 mStreamConfigInfo.stream_sizes[i].height,
13861 mStreamConfigInfo.postprocess_mask[i],
13862 mStreamConfigInfo.format[i]);
13863 }
13864
13865 /* Send meta stream info once again so that ISP can start */
13866 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13867 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13868 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13869 mParameters);
13870 if (rc < 0) {
13871 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13872 }
13873
13874 rc = startAllChannels();
13875 if (rc < 0) {
13876 LOGE("startAllChannels failed");
13877 return rc;
13878 }
13879
13880 LOGD("X");
13881 return rc;
13882}
13883
13884/*===========================================================================
13885 * FUNCTION : stopAllChannels
13886 *
13887 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13888 *
13889 * PARAMETERS : None
13890 *
13891 * RETURN : NO_ERROR on success
13892 * Error codes on failure
13893 *
13894 *==========================================================================*/
13895int32_t QCamera3HardwareInterface::stopAllChannels()
13896{
13897 int32_t rc = NO_ERROR;
13898
13899 LOGD("Stopping all channels");
13900 // Stop the Streams/Channels
13901 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13902 it != mStreamInfo.end(); it++) {
13903 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13904 if (channel) {
13905 channel->stop();
13906 }
13907 (*it)->status = INVALID;
13908 }
13909
13910 if (mSupportChannel) {
13911 mSupportChannel->stop();
13912 }
13913 if (mAnalysisChannel) {
13914 mAnalysisChannel->stop();
13915 }
13916 if (mRawDumpChannel) {
13917 mRawDumpChannel->stop();
13918 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013919 if (mHdrPlusRawSrcChannel) {
13920 mHdrPlusRawSrcChannel->stop();
13921 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013922 if (mMetadataChannel) {
13923 /* If content of mStreamInfo is not 0, there is metadata stream */
13924 mMetadataChannel->stop();
13925 }
13926
13927 LOGD("All channels stopped");
13928 return rc;
13929}
13930
13931/*===========================================================================
13932 * FUNCTION : startAllChannels
13933 *
13934 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13935 *
13936 * PARAMETERS : None
13937 *
13938 * RETURN : NO_ERROR on success
13939 * Error codes on failure
13940 *
13941 *==========================================================================*/
13942int32_t QCamera3HardwareInterface::startAllChannels()
13943{
13944 int32_t rc = NO_ERROR;
13945
13946 LOGD("Start all channels ");
13947 // Start the Streams/Channels
13948 if (mMetadataChannel) {
13949 /* If content of mStreamInfo is not 0, there is metadata stream */
13950 rc = mMetadataChannel->start();
13951 if (rc < 0) {
13952 LOGE("META channel start failed");
13953 return rc;
13954 }
13955 }
13956 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13957 it != mStreamInfo.end(); it++) {
13958 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13959 if (channel) {
13960 rc = channel->start();
13961 if (rc < 0) {
13962 LOGE("channel start failed");
13963 return rc;
13964 }
13965 }
13966 }
13967 if (mAnalysisChannel) {
13968 mAnalysisChannel->start();
13969 }
13970 if (mSupportChannel) {
13971 rc = mSupportChannel->start();
13972 if (rc < 0) {
13973 LOGE("Support channel start failed");
13974 return rc;
13975 }
13976 }
13977 if (mRawDumpChannel) {
13978 rc = mRawDumpChannel->start();
13979 if (rc < 0) {
13980 LOGE("RAW dump channel start failed");
13981 return rc;
13982 }
13983 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013984 if (mHdrPlusRawSrcChannel) {
13985 rc = mHdrPlusRawSrcChannel->start();
13986 if (rc < 0) {
13987 LOGE("HDR+ RAW channel start failed");
13988 return rc;
13989 }
13990 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013991
13992 LOGD("All channels started");
13993 return rc;
13994}
13995
13996/*===========================================================================
13997 * FUNCTION : notifyErrorForPendingRequests
13998 *
13999 * DESCRIPTION: This function sends error for all the pending requests/buffers
14000 *
14001 * PARAMETERS : None
14002 *
14003 * RETURN : Error codes
14004 * NO_ERROR on success
14005 *
14006 *==========================================================================*/
14007int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14008{
Emilian Peev7650c122017-01-19 08:24:33 -080014009 notifyErrorFoPendingDepthData(mDepthChannel);
14010
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014011 auto pendingRequest = mPendingRequestsList.begin();
14012 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014013
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014014 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14015 // buffers (for which buffers aren't sent yet).
14016 while (pendingRequest != mPendingRequestsList.end() ||
14017 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14018 if (pendingRequest == mPendingRequestsList.end() ||
14019 pendingBuffer->frame_number < pendingRequest->frame_number) {
14020 // If metadata for this frame was sent, notify about a buffer error and returns buffers
14021 // with error.
14022 for (auto &info : pendingBuffer->mPendingBufferList) {
14023 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014024 camera3_notify_msg_t notify_msg;
14025 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14026 notify_msg.type = CAMERA3_MSG_ERROR;
14027 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014028 notify_msg.message.error.error_stream = info.stream;
14029 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014030 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014031
14032 camera3_stream_buffer_t buffer = {};
14033 buffer.acquire_fence = -1;
14034 buffer.release_fence = -1;
14035 buffer.buffer = info.buffer;
14036 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14037 buffer.stream = info.stream;
14038 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014039 }
14040
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014041 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14042 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14043 pendingBuffer->frame_number > pendingRequest->frame_number) {
14044 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014045 camera3_notify_msg_t notify_msg;
14046 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14047 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014048 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14049 notify_msg.message.error.error_stream = nullptr;
14050 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014051 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014052
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014053 if (pendingRequest->input_buffer != nullptr) {
14054 camera3_capture_result result = {};
14055 result.frame_number = pendingRequest->frame_number;
14056 result.result = nullptr;
14057 result.input_buffer = pendingRequest->input_buffer;
14058 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014059 }
14060
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014061 mShutterDispatcher.clear(pendingRequest->frame_number);
14062 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14063 } else {
14064 // If both buffers and result metadata weren't sent yet, notify about a request error
14065 // and return buffers with error.
14066 for (auto &info : pendingBuffer->mPendingBufferList) {
14067 camera3_notify_msg_t notify_msg;
14068 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14069 notify_msg.type = CAMERA3_MSG_ERROR;
14070 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14071 notify_msg.message.error.error_stream = info.stream;
14072 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14073 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014074
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014075 camera3_stream_buffer_t buffer = {};
14076 buffer.acquire_fence = -1;
14077 buffer.release_fence = -1;
14078 buffer.buffer = info.buffer;
14079 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14080 buffer.stream = info.stream;
14081 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14082 }
14083
14084 if (pendingRequest->input_buffer != nullptr) {
14085 camera3_capture_result result = {};
14086 result.frame_number = pendingRequest->frame_number;
14087 result.result = nullptr;
14088 result.input_buffer = pendingRequest->input_buffer;
14089 orchestrateResult(&result);
14090 }
14091
14092 mShutterDispatcher.clear(pendingRequest->frame_number);
14093 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14094 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014095 }
14096 }
14097
14098 /* Reset pending frame Drop list and requests list */
14099 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014100 mShutterDispatcher.clear();
14101 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014102 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014103 LOGH("Cleared all the pending buffers ");
14104
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014105 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014106}
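/*
 * Summary sketch (derived from the loop above, not part of the build): for each
 * pending frame number one of three error flavours is emitted.
 *
 *   // metadata sent, buffers pending   -> CAMERA3_MSG_ERROR_BUFFER per buffer
 *   // buffers sent, metadata pending   -> CAMERA3_MSG_ERROR_RESULT for the frame
 *   // neither sent yet                 -> CAMERA3_MSG_ERROR_REQUEST + error buffers
 *   // in every case the error buffers are returned through
 *   // mOutputBufferDispatcher.markBufferReady() with CAMERA3_BUFFER_STATUS_ERROR
 */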
14107
14108bool QCamera3HardwareInterface::isOnEncoder(
14109 const cam_dimension_t max_viewfinder_size,
14110 uint32_t width, uint32_t height)
14111{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014112 return ((width > (uint32_t)max_viewfinder_size.width) ||
14113 (height > (uint32_t)max_viewfinder_size.height) ||
14114 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14115 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014116}
14117
14118/*===========================================================================
14119 * FUNCTION : setBundleInfo
14120 *
14121 * DESCRIPTION: Set bundle info for all streams that are bundle.
14122 *
14123 * PARAMETERS : None
14124 *
14125 * RETURN : NO_ERROR on success
14126 * Error codes on failure
14127 *==========================================================================*/
14128int32_t QCamera3HardwareInterface::setBundleInfo()
14129{
14130 int32_t rc = NO_ERROR;
14131
14132 if (mChannelHandle) {
14133 cam_bundle_config_t bundleInfo;
14134 memset(&bundleInfo, 0, sizeof(bundleInfo));
14135 rc = mCameraHandle->ops->get_bundle_info(
14136 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14137 if (rc != NO_ERROR) {
14138 LOGE("get_bundle_info failed");
14139 return rc;
14140 }
14141 if (mAnalysisChannel) {
14142 mAnalysisChannel->setBundleInfo(bundleInfo);
14143 }
14144 if (mSupportChannel) {
14145 mSupportChannel->setBundleInfo(bundleInfo);
14146 }
14147 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14148 it != mStreamInfo.end(); it++) {
14149 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14150 channel->setBundleInfo(bundleInfo);
14151 }
14152 if (mRawDumpChannel) {
14153 mRawDumpChannel->setBundleInfo(bundleInfo);
14154 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014155 if (mHdrPlusRawSrcChannel) {
14156 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14157 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014158 }
14159
14160 return rc;
14161}
14162
14163/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014164 * FUNCTION : setInstantAEC
14165 *
14166 * DESCRIPTION: Set Instant AEC related params.
14167 *
14168 * PARAMETERS :
14169 * @meta: CameraMetadata reference
14170 *
14171 * RETURN : NO_ERROR on success
14172 * Error codes on failure
14173 *==========================================================================*/
14174int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14175{
14176 int32_t rc = NO_ERROR;
14177 uint8_t val = 0;
14178 char prop[PROPERTY_VALUE_MAX];
14179
14180 // First try to configure instant AEC from framework metadata
14181 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14182 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14183 }
14184
14185 // If framework did not set this value, try to read from set prop.
14186 if (val == 0) {
14187 memset(prop, 0, sizeof(prop));
14188 property_get("persist.camera.instant.aec", prop, "0");
14189 val = (uint8_t)atoi(prop);
14190 }
14191
14192 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14193 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14194 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14195 mInstantAEC = val;
14196 mInstantAECSettledFrameNumber = 0;
14197 mInstantAecFrameIdxCount = 0;
14198 LOGH("instantAEC value set %d",val);
14199 if (mInstantAEC) {
14200 memset(prop, 0, sizeof(prop));
14201 property_get("persist.camera.ae.instant.bound", prop, "10");
14202 int32_t aec_frame_skip_cnt = atoi(prop);
14203 if (aec_frame_skip_cnt >= 0) {
14204 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14205 } else {
14206 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14207 rc = BAD_VALUE;
14208 }
14209 }
14210 } else {
14211 LOGE("Bad instant aec value set %d", val);
14212 rc = BAD_VALUE;
14213 }
14214 return rc;
14215}
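/*
 * Usage sketch (assumption): instant AEC can be requested either through the
 * QCAMERA3_INSTANT_AEC_MODE vendor tag or, as a fallback, through properties.
 * The exact numeric-to-CAM_AEC_* mapping of the value below is an assumption.
 *
 *   adb shell setprop persist.camera.instant.aec 1        // a CAM_AEC_* mode
 *   adb shell setprop persist.camera.ae.instant.bound 8   // display skip bound
 *   // setInstantAEC() then programs CAM_INTF_PARM_INSTANT_AEC and caps
 *   // mAecSkipDisplayFrameBound at 8 while AEC settles.
 */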
14216
14217/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014218 * FUNCTION : get_num_overall_buffers
14219 *
14220 * DESCRIPTION: Compute the total number of pending buffers across all requests.
14221 *
14222 * PARAMETERS : None
14223 *
14224 * RETURN : Number of overall pending buffers
14225 *
14226 *==========================================================================*/
14227uint32_t PendingBuffersMap::get_num_overall_buffers()
14228{
14229 uint32_t sum_buffers = 0;
14230 for (auto &req : mPendingBuffersInRequest) {
14231 sum_buffers += req.mPendingBufferList.size();
14232 }
14233 return sum_buffers;
14234}
14235
14236/*===========================================================================
14237 * FUNCTION : removeBuf
14238 *
14239 * DESCRIPTION: Remove a matching buffer from tracker.
14240 *
14241 * PARAMETERS : @buffer: image buffer for the callback
14242 *
14243 * RETURN : None
14244 *
14245 *==========================================================================*/
14246void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14247{
14248 bool buffer_found = false;
14249 for (auto req = mPendingBuffersInRequest.begin();
14250 req != mPendingBuffersInRequest.end(); req++) {
14251 for (auto k = req->mPendingBufferList.begin();
14252 k != req->mPendingBufferList.end(); k++ ) {
14253 if (k->buffer == buffer) {
14254 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14255 req->frame_number, buffer);
14256 k = req->mPendingBufferList.erase(k);
14257 if (req->mPendingBufferList.empty()) {
14258 // Remove this request from Map
14259 req = mPendingBuffersInRequest.erase(req);
14260 }
14261 buffer_found = true;
14262 break;
14263 }
14264 }
14265 if (buffer_found) {
14266 break;
14267 }
14268 }
14269 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14270 get_num_overall_buffers());
14271}
14272
14273/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014274 * FUNCTION : getBufErrStatus
14275 *
14276 * DESCRIPTION: get buffer error status
14277 *
14278 * PARAMETERS : @buffer: buffer handle
14279 *
14280 * RETURN : Error status
14281 *
14282 *==========================================================================*/
14283int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14284{
14285 for (auto& req : mPendingBuffersInRequest) {
14286 for (auto& k : req.mPendingBufferList) {
14287 if (k.buffer == buffer)
14288 return k.bufStatus;
14289 }
14290 }
14291 return CAMERA3_BUFFER_STATUS_OK;
14292}
14293
14294/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014295 * FUNCTION : setPAAFSupport
14296 *
14297 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14298 * feature mask according to stream type and filter
14299 * arrangement
14300 *
14301 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14302 * @stream_type: stream type
14303 * @filter_arrangement: filter arrangement
14304 *
14305 * RETURN : None
14306 *==========================================================================*/
14307void QCamera3HardwareInterface::setPAAFSupport(
14308 cam_feature_mask_t& feature_mask,
14309 cam_stream_type_t stream_type,
14310 cam_color_filter_arrangement_t filter_arrangement)
14311{
Thierry Strudel3d639192016-09-09 11:52:26 -070014312 switch (filter_arrangement) {
14313 case CAM_FILTER_ARRANGEMENT_RGGB:
14314 case CAM_FILTER_ARRANGEMENT_GRBG:
14315 case CAM_FILTER_ARRANGEMENT_GBRG:
14316 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014317 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14318 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014319 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014320 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14321 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014322 }
14323 break;
14324 case CAM_FILTER_ARRANGEMENT_Y:
14325 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14326 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14327 }
14328 break;
14329 default:
14330 break;
14331 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014332 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14333 feature_mask, stream_type, filter_arrangement);
14334
14335
Thierry Strudel3d639192016-09-09 11:52:26 -070014336}
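/*
 * Illustrative sketch (hypothetical stream): on Bayer sensors the PAAF bit is
 * added for preview/analysis/video streams unless PPEIS is already requested.
 *
 *   cam_feature_mask_t mask = 0;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
 *   // mask now carries CAM_QCOM_FEATURE_PAAF
 */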
14337
14338/*===========================================================================
14339* FUNCTION : getSensorMountAngle
14340*
14341* DESCRIPTION: Retrieve sensor mount angle
14342*
14343* PARAMETERS : None
14344*
14345* RETURN : sensor mount angle in uint32_t
14346*==========================================================================*/
14347uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14348{
14349 return gCamCapability[mCameraId]->sensor_mount_angle;
14350}
14351
14352/*===========================================================================
14353* FUNCTION : getRelatedCalibrationData
14354*
14355* DESCRIPTION: Retrieve related system calibration data
14356*
14357* PARAMETERS : None
14358*
14359* RETURN : Pointer of related system calibration data
14360*==========================================================================*/
14361const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14362{
14363 return (const cam_related_system_calibration_data_t *)
14364 &(gCamCapability[mCameraId]->related_cam_calibration);
14365}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014366
14367/*===========================================================================
14368 * FUNCTION : is60HzZone
14369 *
14370 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity, inferred from the local timezone offset
14371 *
14372 * PARAMETERS : None
14373 *
14374 * RETURN : True if in 60Hz zone, False otherwise
14375 *==========================================================================*/
14376bool QCamera3HardwareInterface::is60HzZone()
14377{
14378 time_t t = time(NULL);
14379 struct tm lt;
14380
14381 struct tm* r = localtime_r(&t, &lt);
14382
14383 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14384 return true;
14385 else
14386 return false;
14387}
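/*
 * Illustrative sketch (derived from the check above): the 50Hz/60Hz guess is
 * based purely on the local UTC offset; offsets inside (-2h, +8h) are treated
 * as 50Hz regions, everything else (or an unknown timezone) as 60Hz.
 *
 *   // UTC-05:00 (US East Coast)  -> tm_gmtoff = -18000 <= -7200 -> true  (60Hz)
 *   // UTC+01:00 (Central Europe) -> tm_gmtoff =   3600, inside  -> false (50Hz)
 */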
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014388
14389/*===========================================================================
14390 * FUNCTION : adjustBlackLevelForCFA
14391 *
14392 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14393 * of bayer CFA (Color Filter Array).
14394 *
14395 * PARAMETERS : @input: black level pattern in the order of RGGB
14396 * @output: black level pattern in the order of CFA
14397 * @color_arrangement: CFA color arrangement
14398 *
14399 * RETURN : None
14400 *==========================================================================*/
14401template<typename T>
14402void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14403 T input[BLACK_LEVEL_PATTERN_CNT],
14404 T output[BLACK_LEVEL_PATTERN_CNT],
14405 cam_color_filter_arrangement_t color_arrangement)
14406{
14407 switch (color_arrangement) {
14408 case CAM_FILTER_ARRANGEMENT_GRBG:
14409 output[0] = input[1];
14410 output[1] = input[0];
14411 output[2] = input[3];
14412 output[3] = input[2];
14413 break;
14414 case CAM_FILTER_ARRANGEMENT_GBRG:
14415 output[0] = input[2];
14416 output[1] = input[3];
14417 output[2] = input[0];
14418 output[3] = input[1];
14419 break;
14420 case CAM_FILTER_ARRANGEMENT_BGGR:
14421 output[0] = input[3];
14422 output[1] = input[2];
14423 output[2] = input[1];
14424 output[3] = input[0];
14425 break;
14426 case CAM_FILTER_ARRANGEMENT_RGGB:
14427 output[0] = input[0];
14428 output[1] = input[1];
14429 output[2] = input[2];
14430 output[3] = input[3];
14431 break;
14432 default:
14433 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14434 break;
14435 }
14436}
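/*
 * Minimal usage sketch for adjustBlackLevelForCFA() (illustrative only; the
 * variable names below are hypothetical and not part of this HAL):
 *
 *   float rggbBlackLevel[BLACK_LEVEL_PATTERN_CNT] = {64.f, 65.f, 66.f, 60.f};
 *   float cfaBlackLevel[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(rggbBlackLevel, cfaBlackLevel,
 *           CAM_FILTER_ARRANGEMENT_GBRG);
 *   // Input order is {R, Gr, Gb, B}; for a GBRG sensor the output order is
 *   // {Gb, B, R, Gr}, i.e. {66.f, 60.f, 64.f, 65.f}.
 */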
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014437
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014438void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14439 CameraMetadata &resultMetadata,
14440 std::shared_ptr<metadata_buffer_t> settings)
14441{
14442 if (settings == nullptr) {
14443 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14444 return;
14445 }
14446
14447 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14448 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14449 }
14450
14451 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14452 String8 str((const char *)gps_methods);
14453 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14454 }
14455
14456 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14457 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14458 }
14459
14460 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14461 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14462 }
14463
14464 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14465 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14466 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14467 }
14468
14469 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14470 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14471 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14472 }
14473
14474 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14475 int32_t fwk_thumb_size[2];
14476 fwk_thumb_size[0] = thumb_size->width;
14477 fwk_thumb_size[1] = thumb_size->height;
14478 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14479 }
14480
14481 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14482 uint8_t fwk_intent = intent[0];
14483 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14484 }
14485}
14486
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014487bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14488 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14489 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014490{
14491 if (hdrPlusRequest == nullptr) return false;
14492
14493 // Check noise reduction mode is high quality.
14494 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14495 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14496 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014497 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14498 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014499 return false;
14500 }
14501
14502 // Check edge mode is high quality.
14503 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14504 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14505 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14506 return false;
14507 }
14508
14509 if (request.num_output_buffers != 1 ||
14510 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14511 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014512 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14513 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14514                    request.output_buffers[i].stream->width,
14515                    request.output_buffers[i].stream->height,
14516                    request.output_buffers[i].stream->format);
14517 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014518 return false;
14519 }
14520
14521 // Get a YUV buffer from pic channel.
14522 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14523 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14524 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14525 if (res != OK) {
14526 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14527 __FUNCTION__, strerror(-res), res);
14528 return false;
14529 }
14530
14531 pbcamera::StreamBuffer buffer;
14532 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014533 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014534 buffer.data = yuvBuffer->buffer;
14535 buffer.dataSize = yuvBuffer->frame_len;
14536
14537 pbcamera::CaptureRequest pbRequest;
14538 pbRequest.id = request.frame_number;
14539 pbRequest.outputBuffers.push_back(buffer);
14540
14541 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014542 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014543 if (res != OK) {
14544 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14545 strerror(-res), res);
14546 return false;
14547 }
14548
14549 hdrPlusRequest->yuvBuffer = yuvBuffer;
14550 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14551
14552 return true;
14553}
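/*
 * Sketch of a capture request that passes the HDR+ gating above (illustrative
 * only; assumes an already configured BLOB (JPEG) stream named jpegStream):
 *
 *   CameraMetadata settings;
 *   uint8_t nr = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
 *   uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
 *   settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
 *   settings.update(ANDROID_EDGE_MODE, &edge, 1);
 *
 *   camera3_stream_buffer_t jpegBuffer = {};
 *   jpegBuffer.stream = jpegStream;  // stream->format == HAL_PIXEL_FORMAT_BLOB
 *
 *   camera3_capture_request_t request = {};
 *   request.frame_number = 100;
 *   request.num_output_buffers = 1;
 *   request.output_buffers = &jpegBuffer;
 *
 *   // With HQ noise reduction, HQ edge mode and a single JPEG output,
 *   // trySubmittingHdrPlusRequestLocked() reserves a YUV buffer from the pic
 *   // channel and submits the request to the HDR+ service; anything else
 *   // falls back to the regular capture path.
 */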
14554
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014555status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14556{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014557 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14558 return OK;
14559 }
14560
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014561 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014562 if (res != OK) {
14563 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14564 strerror(-res), res);
14565 return res;
14566 }
14567 gHdrPlusClientOpening = true;
14568
14569 return OK;
14570}
14571
Chien-Yu Chenee335912017-02-09 17:53:20 -080014572status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14573{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014574 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014575
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014576 if (mHdrPlusModeEnabled) {
14577 return OK;
14578 }
14579
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014580 // Check if gHdrPlusClient is opened or being opened.
14581 if (gHdrPlusClient == nullptr) {
14582 if (gHdrPlusClientOpening) {
14583 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14584 return OK;
14585 }
14586
14587 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014588 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014589 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14590 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014591 return res;
14592 }
14593
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014594 // When opening HDR+ client completes, HDR+ mode will be enabled.
14595 return OK;
14596
Chien-Yu Chenee335912017-02-09 17:53:20 -080014597 }
14598
14599 // Configure stream for HDR+.
14600 res = configureHdrPlusStreamsLocked();
14601 if (res != OK) {
14602 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014603 return res;
14604 }
14605
14606 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14607 res = gHdrPlusClient->setZslHdrPlusMode(true);
14608 if (res != OK) {
14609 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014610 return res;
14611 }
14612
14613 mHdrPlusModeEnabled = true;
14614 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14615
14616 return OK;
14617}
14618
14619void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14620{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014621 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014622 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014623 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14624 if (res != OK) {
14625 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14626 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014627
14628 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014629 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014630 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014631 }
14632
14633 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014634 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014635 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14636}
14637
14638status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014639{
14640 pbcamera::InputConfiguration inputConfig;
14641 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14642 status_t res = OK;
14643
14644 // Configure HDR+ client streams.
14645 // Get input config.
14646 if (mHdrPlusRawSrcChannel) {
14647 // HDR+ input buffers will be provided by HAL.
14648 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14649 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14650 if (res != OK) {
14651            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14652 __FUNCTION__, strerror(-res), res);
14653 return res;
14654 }
14655
14656 inputConfig.isSensorInput = false;
14657 } else {
14658 // Sensor MIPI will send data to Easel.
14659 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014660 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014661 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14662 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14663 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14664 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14665 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014666 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014667 if (mSensorModeInfo.num_raw_bits != 10) {
14668 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14669 mSensorModeInfo.num_raw_bits);
14670 return BAD_VALUE;
14671 }
14672
14673 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014674 }
14675
14676 // Get output configurations.
14677 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014678 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014679
14680 // Easel may need to output YUV output buffers if mPictureChannel was created.
14681 pbcamera::StreamConfiguration yuvOutputConfig;
14682 if (mPictureChannel != nullptr) {
14683 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14684 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14685 if (res != OK) {
14686            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14687 __FUNCTION__, strerror(-res), res);
14688
14689 return res;
14690 }
14691
14692 outputStreamConfigs.push_back(yuvOutputConfig);
14693 }
14694
14695 // TODO: consider other channels for YUV output buffers.
14696
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014697 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014698 if (res != OK) {
14699        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14700 strerror(-res), res);
14701 return res;
14702 }
14703
14704 return OK;
14705}
14706
Chien-Yu Chen933db802017-07-14 14:31:53 -070014707void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
14708{
14709 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
14710 // Set HAL state to error.
14711 pthread_mutex_lock(&mMutex);
14712 mState = ERROR;
14713 pthread_mutex_unlock(&mMutex);
14714
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070014715 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen933db802017-07-14 14:31:53 -070014716}
14717
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014718void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14719{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014720 if (client == nullptr) {
14721 ALOGE("%s: Opened client is null.", __FUNCTION__);
14722 return;
14723 }
14724
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014725 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014726 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14727
14728 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014729 if (!gHdrPlusClientOpening) {
14730 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14731 return;
14732 }
14733
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014734 gHdrPlusClient = std::move(client);
14735 gHdrPlusClientOpening = false;
14736
14737 // Set static metadata.
14738 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14739 if (res != OK) {
14740 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14741 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014742 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014743 gHdrPlusClient = nullptr;
14744 return;
14745 }
14746
14747 // Enable HDR+ mode.
14748 res = enableHdrPlusModeLocked();
14749 if (res != OK) {
14750        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14751 }
14752}
14753
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014754void QCamera3HardwareInterface::onOpenFailed(status_t err)
14755{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014756 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14757 Mutex::Autolock l(gHdrPlusClientLock);
14758 gHdrPlusClientOpening = false;
14759}
14760
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014761void QCamera3HardwareInterface::onFatalError()
14762{
14763 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14764
14765 // Set HAL state to error.
14766 pthread_mutex_lock(&mMutex);
14767 mState = ERROR;
14768 pthread_mutex_unlock(&mMutex);
14769
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070014770 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014771}
14772
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014773void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014774 const camera_metadata_t &resultMetadata)
14775{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014776 if (result != nullptr) {
14777 if (result->outputBuffers.size() != 1) {
14778 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14779 result->outputBuffers.size());
14780 return;
14781 }
14782
14783 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14784 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14785 result->outputBuffers[0].streamId);
14786 return;
14787 }
14788
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014789 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014790 HdrPlusPendingRequest pendingRequest;
14791 {
14792 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14793 auto req = mHdrPlusPendingRequests.find(result->requestId);
14794 pendingRequest = req->second;
14795 }
14796
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014797 // Update the result metadata with the settings of the HDR+ still capture request because
14798 // the result metadata belongs to a ZSL buffer.
14799 CameraMetadata metadata;
14800 metadata = &resultMetadata;
14801 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14802 camera_metadata_t* updatedResultMetadata = metadata.release();
14803
14804 QCamera3PicChannel *picChannel =
14805 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14806
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014807 // Check if dumping HDR+ YUV output is enabled.
14808 char prop[PROPERTY_VALUE_MAX];
14809 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14810 bool dumpYuvOutput = atoi(prop);
14811
14812 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014813 // Dump yuv buffer to a ppm file.
14814 pbcamera::StreamConfiguration outputConfig;
14815 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14816 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14817 if (rc == OK) {
14818 char buf[FILENAME_MAX] = {};
14819 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14820 result->requestId, result->outputBuffers[0].streamId,
14821 outputConfig.image.width, outputConfig.image.height);
14822
14823 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14824 } else {
14825 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14826 __FUNCTION__, strerror(-rc), rc);
14827 }
14828 }
14829
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014830 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14831 auto halMetadata = std::make_shared<metadata_buffer_t>();
14832 clear_metadata_buffer(halMetadata.get());
14833
14834 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14835 // encoding.
14836 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14837 halStreamId, /*minFrameDuration*/0);
14838 if (res == OK) {
14839 // Return the buffer to pic channel for encoding.
14840 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14841 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14842 halMetadata);
14843 } else {
14844 // Return the buffer without encoding.
14845 // TODO: This should not happen but we may want to report an error buffer to camera
14846 // service.
14847 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14848 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14849 strerror(-res), res);
14850 }
14851
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014852 // Find the timestamp
14853 camera_metadata_ro_entry_t entry;
14854 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14855 ANDROID_SENSOR_TIMESTAMP, &entry);
14856 if (res != OK) {
14857 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14858 __FUNCTION__, result->requestId, strerror(-res), res);
14859 } else {
14860 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14861 }
14862
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014863 // Send HDR+ metadata to framework.
14864 {
14865 pthread_mutex_lock(&mMutex);
14866
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014867 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14868 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014869 pthread_mutex_unlock(&mMutex);
14870 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014871
14872 // Remove the HDR+ pending request.
14873 {
14874 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14875 auto req = mHdrPlusPendingRequests.find(result->requestId);
14876 mHdrPlusPendingRequests.erase(req);
14877 }
14878 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014879}
14880
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014881void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14882{
14883 if (failedResult == nullptr) {
14884 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14885 return;
14886 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014887
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014888 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014889
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014890 // Remove the pending HDR+ request.
14891 {
14892 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14893 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14894
14895 // Return the buffer to pic channel.
14896 QCamera3PicChannel *picChannel =
14897 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14898 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14899
14900 mHdrPlusPendingRequests.erase(pendingRequest);
14901 }
14902
14903 pthread_mutex_lock(&mMutex);
14904
14905 // Find the pending buffers.
14906 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14907 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14908 if (pendingBuffers->frame_number == failedResult->requestId) {
14909 break;
14910 }
14911 pendingBuffers++;
14912 }
14913
14914 // Send out buffer errors for the pending buffers.
14915 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14916 std::vector<camera3_stream_buffer_t> streamBuffers;
14917 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14918 // Prepare a stream buffer.
14919 camera3_stream_buffer_t streamBuffer = {};
14920 streamBuffer.stream = buffer.stream;
14921 streamBuffer.buffer = buffer.buffer;
14922 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14923 streamBuffer.acquire_fence = -1;
14924 streamBuffer.release_fence = -1;
14925
14926 streamBuffers.push_back(streamBuffer);
14927
14928 // Send out error buffer event.
14929 camera3_notify_msg_t notify_msg = {};
14930 notify_msg.type = CAMERA3_MSG_ERROR;
14931 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14932 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14933 notify_msg.message.error.error_stream = buffer.stream;
14934
14935 orchestrateNotify(&notify_msg);
14936 }
14937
14938 camera3_capture_result_t result = {};
14939 result.frame_number = pendingBuffers->frame_number;
14940 result.num_output_buffers = streamBuffers.size();
14941 result.output_buffers = &streamBuffers[0];
14942
14943 // Send out result with buffer errors.
14944 orchestrateResult(&result);
14945
14946 // Remove pending buffers.
14947 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14948 }
14949
14950 // Remove pending request.
14951 auto halRequest = mPendingRequestsList.begin();
14952 while (halRequest != mPendingRequestsList.end()) {
14953 if (halRequest->frame_number == failedResult->requestId) {
14954 mPendingRequestsList.erase(halRequest);
14955 break;
14956 }
14957 halRequest++;
14958 }
14959
14960 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014961}
14962
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014963
14964ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14965 mParent(parent) {}
14966
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014967void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014968{
14969 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014970
14971 if (isReprocess) {
14972 mReprocessShutters.emplace(frameNumber, Shutter());
14973 } else {
14974 mShutters.emplace(frameNumber, Shutter());
14975 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014976}
14977
14978void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14979{
14980 std::lock_guard<std::mutex> lock(mLock);
14981
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014982 std::map<uint32_t, Shutter> *shutters = nullptr;
14983
14984 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014985 auto shutter = mShutters.find(frameNumber);
14986 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014987 shutter = mReprocessShutters.find(frameNumber);
14988 if (shutter == mReprocessShutters.end()) {
14989 // Shutter was already sent.
14990 return;
14991 }
14992 shutters = &mReprocessShutters;
14993 } else {
14994 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014995 }
14996
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014997 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014998 shutter->second.ready = true;
14999 shutter->second.timestamp = timestamp;
15000
15001    // Iterate through the shutters and send them out until the first one that's not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015002 shutter = shutters->begin();
15003 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015004 if (!shutter->second.ready) {
15005 // If this shutter is not ready, the following shutters can't be sent.
15006 break;
15007 }
15008
15009 camera3_notify_msg_t msg = {};
15010 msg.type = CAMERA3_MSG_SHUTTER;
15011 msg.message.shutter.frame_number = shutter->first;
15012 msg.message.shutter.timestamp = shutter->second.timestamp;
15013 mParent->orchestrateNotify(&msg);
15014
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015015 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015016 }
15017}
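/*
 * In-order dispatch example (illustrative only): expectShutter() is called for
 * frames 10, 11 and 12. If markShutterReady(11, ts11) arrives first, frame 11
 * is only marked ready and held, because frame 10 at the head of the map is
 * not ready yet. Once markShutterReady(10, ts10) arrives, the shutters for 10
 * and 11 are sent back-to-back and frame 12 remains held until its own
 * timestamp arrives. Regular and reprocess frames are tracked in separate maps
 * and therefore ordered independently of each other.
 */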
15018
15019void ShutterDispatcher::clear(uint32_t frameNumber)
15020{
15021 std::lock_guard<std::mutex> lock(mLock);
15022 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015023 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015024}
15025
15026void ShutterDispatcher::clear()
15027{
15028 std::lock_guard<std::mutex> lock(mLock);
15029
15030 // Log errors for stale shutters.
15031 for (auto &shutter : mShutters) {
15032 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15033 __FUNCTION__, shutter.first, shutter.second.ready,
15034 shutter.second.timestamp);
15035 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015036
15037 // Log errors for stale reprocess shutters.
15038 for (auto &shutter : mReprocessShutters) {
15039 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15040 __FUNCTION__, shutter.first, shutter.second.ready,
15041 shutter.second.timestamp);
15042 }
15043
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015044 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015045 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015046}
15047
15048OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15049 mParent(parent) {}
15050
15051status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15052{
15053 std::lock_guard<std::mutex> lock(mLock);
15054 mStreamBuffers.clear();
15055 if (!streamList) {
15056 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15057 return -EINVAL;
15058 }
15059
15060 // Create a "frame-number -> buffer" map for each stream.
15061 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15062 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15063 }
15064
15065 return OK;
15066}
15067
15068status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15069{
15070 std::lock_guard<std::mutex> lock(mLock);
15071
15072 // Find the "frame-number -> buffer" map for the stream.
15073 auto buffers = mStreamBuffers.find(stream);
15074 if (buffers == mStreamBuffers.end()) {
15075 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15076 return -EINVAL;
15077 }
15078
15079 // Create an unready buffer for this frame number.
15080 buffers->second.emplace(frameNumber, Buffer());
15081 return OK;
15082}
15083
15084void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15085 const camera3_stream_buffer_t &buffer)
15086{
15087 std::lock_guard<std::mutex> lock(mLock);
15088
15089 // Find the frame number -> buffer map for the stream.
15090 auto buffers = mStreamBuffers.find(buffer.stream);
15091 if (buffers == mStreamBuffers.end()) {
15092 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15093 return;
15094 }
15095
15096    // Find the unready buffer for this frame number and mark it ready.
15097 auto pendingBuffer = buffers->second.find(frameNumber);
15098 if (pendingBuffer == buffers->second.end()) {
15099 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15100 return;
15101 }
15102
15103 pendingBuffer->second.ready = true;
15104 pendingBuffer->second.buffer = buffer;
15105
15106    // Iterate through the buffers and send them out until the first one that's not ready yet.
15107 pendingBuffer = buffers->second.begin();
15108 while (pendingBuffer != buffers->second.end()) {
15109 if (!pendingBuffer->second.ready) {
15110 // If this buffer is not ready, the following buffers can't be sent.
15111 break;
15112 }
15113
15114 camera3_capture_result_t result = {};
15115 result.frame_number = pendingBuffer->first;
15116 result.num_output_buffers = 1;
15117 result.output_buffers = &pendingBuffer->second.buffer;
15118
15119        // Send out the result with the ready buffer.
15120 mParent->orchestrateResult(&result);
15121
15122 pendingBuffer = buffers->second.erase(pendingBuffer);
15123 }
15124}
15125
15126void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15127{
15128 std::lock_guard<std::mutex> lock(mLock);
15129
15130 // Log errors for stale buffers.
15131 for (auto &buffers : mStreamBuffers) {
15132 for (auto &buffer : buffers.second) {
15133 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15134 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15135 }
15136 buffers.second.clear();
15137 }
15138
15139 if (clearConfiguredStreams) {
15140 mStreamBuffers.clear();
15141 }
15142}
15143
Thierry Strudel3d639192016-09-09 11:52:26 -070015144}; //end namespace qcamera