/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

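// EMPTY_PIPELINE_DELAY: frame delay assumed while the capture pipeline is still empty;
// PARTIAL_RESULT_COUNT: number of partial metadata results delivered per capture request.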
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

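// Supported JPEG thumbnail sizes, flattened as (width, height) pairs; the leading
// (0, 0) entry advertises that thumbnail generation may be disabled.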
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the
 * logic traverses from lower to higher index, so for HAL values that map to different
 * Android values, the first match found is selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

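// camera3_device_ops vtable handed to the camera framework; entries left NULL
// (register_stream_buffers, get_metadata_vendor_tag_ops) are not implemented by this HAL.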
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

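/*===========================================================================
 * FUNCTION   : logEaselEvent
 *
 * DESCRIPTION: Log an Easel event tagged with a CLOCK_BOOTTIME timestamp (in
 *              milliseconds); only active when gEaselProfilingEnabled is set.
 *==========================================================================*/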
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

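    // If an Easel coprocessor is present on this device, resume it before opening the camera.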
    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient.isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient.suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

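    // Tear down the HDR+ client and suspend Easel, undoing the resume performed in openCamera().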
    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

1175/*===========================================================================
1176 * FUNCTION : validateStreamDimensions
1177 *
1178 * DESCRIPTION: Check if the configuration requested are those advertised
1179 *
1180 * PARAMETERS :
1181 * @stream_list : streams to be configured
1182 *
1183 * RETURN :
1184 *
1185 *==========================================================================*/
1186int QCamera3HardwareInterface::validateStreamDimensions(
1187 camera3_stream_configuration_t *streamList)
1188{
1189 int rc = NO_ERROR;
1190 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001191 uint32_t depthWidth = 0;
1192 uint32_t depthHeight = 0;
1193 if (mPDSupported) {
1194 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1195 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1196 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001197
1198 camera3_stream_t *inputStream = NULL;
1199 /*
1200 * Loop through all streams to find input stream if it exists*
1201 */
1202 for (size_t i = 0; i< streamList->num_streams; i++) {
1203 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1204 if (inputStream != NULL) {
1205 LOGE("Error, Multiple input streams requested");
1206 return -EINVAL;
1207 }
1208 inputStream = streamList->streams[i];
1209 }
1210 }
1211 /*
1212 * Loop through all streams requested in configuration
1213 * Check if unsupported sizes have been requested on any of them
1214 */
1215 for (size_t j = 0; j < streamList->num_streams; j++) {
1216 bool sizeFound = false;
1217 camera3_stream_t *newStream = streamList->streams[j];
1218
1219 uint32_t rotatedHeight = newStream->height;
1220 uint32_t rotatedWidth = newStream->width;
1221 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1222 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1223 rotatedHeight = newStream->width;
1224 rotatedWidth = newStream->height;
1225 }
1226
1227 /*
1228 * Sizes are different for each type of stream format check against
1229 * appropriate table.
1230 */
1231 switch (newStream->format) {
1232 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1233 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1234 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001235 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1236 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1237 mPDSupported) {
1238 if ((depthWidth == newStream->width) &&
1239 (depthHeight == newStream->height)) {
1240 sizeFound = true;
1241 }
1242 break;
1243 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001244 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1245 for (size_t i = 0; i < count; i++) {
1246 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1247 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1248 sizeFound = true;
1249 break;
1250 }
1251 }
1252 break;
1253 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001254 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1255 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001256 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001257 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001258 if ((depthSamplesCount == newStream->width) &&
1259 (1 == newStream->height)) {
1260 sizeFound = true;
1261 }
1262 break;
1263 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001264 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1265 /* Verify set size against generated sizes table */
1266 for (size_t i = 0; i < count; i++) {
1267 if (((int32_t)rotatedWidth ==
1268 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1269 ((int32_t)rotatedHeight ==
1270 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1271 sizeFound = true;
1272 break;
1273 }
1274 }
1275 break;
1276 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1277 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1278 default:
1279 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1280 || newStream->stream_type == CAMERA3_STREAM_INPUT
1281 || IS_USAGE_ZSL(newStream->usage)) {
1282 if (((int32_t)rotatedWidth ==
1283 gCamCapability[mCameraId]->active_array_size.width) &&
1284 ((int32_t)rotatedHeight ==
1285 gCamCapability[mCameraId]->active_array_size.height)) {
1286 sizeFound = true;
1287 break;
1288 }
1289 /* We could potentially break here to enforce ZSL stream
1290 * set from frameworks always is full active array size
1291 * but it is not clear from the spc if framework will always
1292 * follow that, also we have logic to override to full array
1293 * size, so keeping the logic lenient at the moment
1294 */
1295 }
1296 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1297 MAX_SIZES_CNT);
1298 for (size_t i = 0; i < count; i++) {
1299 if (((int32_t)rotatedWidth ==
1300 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1301 ((int32_t)rotatedHeight ==
1302 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1303 sizeFound = true;
1304 break;
1305 }
1306 }
1307 break;
1308 } /* End of switch(newStream->format) */
1309
1310 /* We error out even if a single stream has unsupported size set */
1311 if (!sizeFound) {
1312 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1313 rotatedWidth, rotatedHeight, newStream->format,
1314 gCamCapability[mCameraId]->active_array_size.width,
1315 gCamCapability[mCameraId]->active_array_size.height);
1316 rc = -EINVAL;
1317 break;
1318 }
1319 } /* End of for each stream */
1320 return rc;
1321}
1322
/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
                 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

1382/*===========================================================================
1383 * FUNCTION : validateUsageFlagsForEis
1384 *
1385 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1386 *
1387 * PARAMETERS :
1388 * @stream_list : streams to be configured
1389 *
1390 * RETURN :
1391 * NO_ERROR if the usage flags are supported
1392 * error code if usage flags are not supported
1393 *
1394 *==========================================================================*/
1395int QCamera3HardwareInterface::validateUsageFlagsForEis(
1396 const camera3_stream_configuration_t* streamList)
1397{
1398 for (size_t j = 0; j < streamList->num_streams; j++) {
1399 const camera3_stream_t *newStream = streamList->streams[j];
1400
1401 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1402 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1403
1404 // Because EIS is "hard-coded" for certain use cases, and the current
1405 // implementation doesn't support shared preview and video on the same
1406 // stream, return failure if EIS is forced on.
1407 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1408 LOGE("Combined video and preview usage flag is not supported due to EIS");
1409 return -EINVAL;
1410 }
1411 }
1412 return NO_ERROR;
1413}
1414
Thierry Strudel3d639192016-09-09 11:52:26 -07001415/*==============================================================================
1416 * FUNCTION : isSupportChannelNeeded
1417 *
1418 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1419 *
1420 * PARAMETERS :
1421 * @stream_list : streams to be configured
1422 * @stream_config_info : the config info for streams to be configured
1423 *
1424 * RETURN : Boolean true/false decision
1425 *
1426 *==========================================================================*/
1427bool QCamera3HardwareInterface::isSupportChannelNeeded(
1428 camera3_stream_configuration_t *streamList,
1429 cam_stream_size_info_t stream_config_info)
1430{
1431 uint32_t i;
1432 bool pprocRequested = false;
1433 /* Check for conditions where the PProc pipeline does not have any streams */
1434 for (i = 0; i < stream_config_info.num_streams; i++) {
1435 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1436 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1437 pprocRequested = true;
1438 break;
1439 }
1440 }
1441
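/* If no configured stream exercises the PProc pipeline, a support channel is needed */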
1442 if (pprocRequested == false)
1443 return true;
1444
1445 /* Dummy stream needed if only raw or JPEG streams are present */
1446 for (i = 0; i < streamList->num_streams; i++) {
1447 switch(streamList->streams[i]->format) {
1448 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1449 case HAL_PIXEL_FORMAT_RAW10:
1450 case HAL_PIXEL_FORMAT_RAW16:
1451 case HAL_PIXEL_FORMAT_BLOB:
1452 break;
1453 default:
1454 return false;
1455 }
1456 }
1457 return true;
1458}
1459
1460/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001461 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001462 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001463 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001464 *
1465 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001466 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001467 *
1468 * RETURN : int32_t type of status
1469 * NO_ERROR -- success
1470 * non-zero failure code
1471 *
1472 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001473int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001474{
1475 int32_t rc = NO_ERROR;
1476
1477 cam_dimension_t max_dim = {0, 0};
1478 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1479 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1480 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1481 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1482 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1483 }
1484
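/* Publish the largest requested stream dimensions so the backend can select a
 * sensor mode that covers every configured stream; the selected mode is then
 * queried back below via CAM_INTF_PARM_SENSOR_MODE_INFO */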
1485 clear_metadata_buffer(mParameters);
1486
1487 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1488 max_dim);
1489 if (rc != NO_ERROR) {
1490 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1491 return rc;
1492 }
1493
1494 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1495 if (rc != NO_ERROR) {
1496 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1497 return rc;
1498 }
1499
1500 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001501 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001502
1503 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1504 mParameters);
1505 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001506 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001507 return rc;
1508 }
1509
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001510 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001511 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1512 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1513 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1514 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1515 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001516
1517 return rc;
1518}
1519
1520/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001521 * FUNCTION : addToPPFeatureMask
1522 *
1523 * DESCRIPTION: add additional features to pp feature mask based on
1524 * stream type and use case
1525 *
1526 * PARAMETERS :
1527 * @stream_format : stream type for feature mask
1528 * @stream_idx : stream idx within postprocess_mask list to change
1529 *
1530 * RETURN : None
1531 *
1532 *==========================================================================*/
1533void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1534 uint32_t stream_idx)
1535{
1536 char feature_mask_value[PROPERTY_VALUE_MAX];
1537 cam_feature_mask_t feature_mask;
1538 int args_converted;
1539 int property_len;
1540
1541 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001542#ifdef _LE_CAMERA_
1543 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1544 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1545 property_len = property_get("persist.camera.hal3.feature",
1546 feature_mask_value, swtnr_feature_mask_value);
1547#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001548 property_len = property_get("persist.camera.hal3.feature",
1549 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001550#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001551 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1552 (feature_mask_value[1] == 'x')) {
1553 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1554 } else {
1555 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1556 }
1557 if (1 != args_converted) {
1558 feature_mask = 0;
1559 LOGE("Wrong feature mask %s", feature_mask_value);
1560 return;
1561 }
1562
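// The property value may be hex ("0x"-prefixed) or decimal; the parsed mask only
// gates which optional features get OR'ed into the stream's postprocess mask below.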
1563 switch (stream_format) {
1564 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1565 /* Add LLVD to pp feature mask only if video hint is enabled */
1566 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1567 mStreamConfigInfo.postprocess_mask[stream_idx]
1568 |= CAM_QTI_FEATURE_SW_TNR;
1569 LOGH("Added SW TNR to pp feature mask");
1570 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1571 mStreamConfigInfo.postprocess_mask[stream_idx]
1572 |= CAM_QCOM_FEATURE_LLVD;
1573 LOGH("Added LLVD SeeMore to pp feature mask");
1574 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001575 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1576 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1577 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1578 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001579 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1580 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1581 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1582 CAM_QTI_FEATURE_BINNING_CORRECTION;
1583 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001584 break;
1585 }
1586 default:
1587 break;
1588 }
1589 LOGD("PP feature mask %llx",
1590 mStreamConfigInfo.postprocess_mask[stream_idx]);
1591}
1592
1593/*==============================================================================
1594 * FUNCTION : updateFpsInPreviewBuffer
1595 *
1596 * DESCRIPTION: update FPS information in preview buffer.
1597 *
1598 * PARAMETERS :
1599 * @metadata : pointer to metadata buffer
1600 * @frame_number: frame_number to look for in pending buffer list
1601 *
1602 * RETURN : None
1603 *
1604 *==========================================================================*/
1605void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1606 uint32_t frame_number)
1607{
1608 // Mark all pending buffers for this particular request
1609 // with corresponding framerate information
1610 for (List<PendingBuffersInRequest>::iterator req =
1611 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1612 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1613 for(List<PendingBufferInfo>::iterator j =
1614 req->mPendingBufferList.begin();
1615 j != req->mPendingBufferList.end(); j++) {
1616 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1617 if ((req->frame_number == frame_number) &&
1618 (channel->getStreamTypeMask() &
1619 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1620 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1621 CAM_INTF_PARM_FPS_RANGE, metadata) {
1622 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1623 struct private_handle_t *priv_handle =
1624 (struct private_handle_t *)(*(j->buffer));
1625 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1626 }
1627 }
1628 }
1629 }
1630}
1631
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001632/*==============================================================================
1633 * FUNCTION : updateTimeStampInPendingBuffers
1634 *
1635 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1636 * of a frame number
1637 *
1638 * PARAMETERS :
1639 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1640 * @timestamp : timestamp to be set
1641 *
1642 * RETURN : None
1643 *
1644 *==========================================================================*/
1645void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1646 uint32_t frameNumber, nsecs_t timestamp)
1647{
1648 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1649 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1650 if (req->frame_number != frameNumber)
1651 continue;
1652
1653 for (auto k = req->mPendingBufferList.begin();
1654 k != req->mPendingBufferList.end(); k++ ) {
1655 struct private_handle_t *priv_handle =
1656 (struct private_handle_t *) (*(k->buffer));
1657 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1658 }
1659 }
1660 return;
1661}
1662
Thierry Strudel3d639192016-09-09 11:52:26 -07001663/*===========================================================================
1664 * FUNCTION : configureStreams
1665 *
1666 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1667 * and output streams.
1668 *
1669 * PARAMETERS :
1670 * @stream_list : streams to be configured
1671 *
1672 * RETURN :
1673 *
1674 *==========================================================================*/
1675int QCamera3HardwareInterface::configureStreams(
1676 camera3_stream_configuration_t *streamList)
1677{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001678 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001679 int rc = 0;
1680
1681 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001682 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001683 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001684 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001685
1686 return rc;
1687}
1688
1689/*===========================================================================
1690 * FUNCTION : configureStreamsPerfLocked
1691 *
1692 * DESCRIPTION: configureStreams while perfLock is held.
1693 *
1694 * PARAMETERS :
1695 * @stream_list : streams to be configured
1696 *
1697 * RETURN : int32_t type of status
1698 * NO_ERROR -- success
1699 * non-zero failure code
1700 *==========================================================================*/
1701int QCamera3HardwareInterface::configureStreamsPerfLocked(
1702 camera3_stream_configuration_t *streamList)
1703{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001704 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001705 int rc = 0;
1706
1707 // Sanity check stream_list
1708 if (streamList == NULL) {
1709 LOGE("NULL stream configuration");
1710 return BAD_VALUE;
1711 }
1712 if (streamList->streams == NULL) {
1713 LOGE("NULL stream list");
1714 return BAD_VALUE;
1715 }
1716
1717 if (streamList->num_streams < 1) {
1718 LOGE("Bad number of streams requested: %d",
1719 streamList->num_streams);
1720 return BAD_VALUE;
1721 }
1722
1723 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1724 LOGE("Maximum number of streams %d exceeded: %d",
1725 MAX_NUM_STREAMS, streamList->num_streams);
1726 return BAD_VALUE;
1727 }
1728
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001729 rc = validateUsageFlags(streamList);
1730 if (rc != NO_ERROR) {
1731 return rc;
1732 }
1733
Thierry Strudel3d639192016-09-09 11:52:26 -07001734 mOpMode = streamList->operation_mode;
1735 LOGD("mOpMode: %d", mOpMode);
1736
1737 /* first invalidate all the streams in mStreamInfo;
1738 * if they appear again, they will be validated */
1739 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1740 it != mStreamInfo.end(); it++) {
1741 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1742 if (channel) {
1743 channel->stop();
1744 }
1745 (*it)->status = INVALID;
1746 }
1747
1748 if (mRawDumpChannel) {
1749 mRawDumpChannel->stop();
1750 delete mRawDumpChannel;
1751 mRawDumpChannel = NULL;
1752 }
1753
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001754 if (mHdrPlusRawSrcChannel) {
1755 mHdrPlusRawSrcChannel->stop();
1756 delete mHdrPlusRawSrcChannel;
1757 mHdrPlusRawSrcChannel = NULL;
1758 }
1759
Thierry Strudel3d639192016-09-09 11:52:26 -07001760 if (mSupportChannel)
1761 mSupportChannel->stop();
1762
1763 if (mAnalysisChannel) {
1764 mAnalysisChannel->stop();
1765 }
1766 if (mMetadataChannel) {
1767 /* If mStreamInfo is not empty, the metadata stream exists */
1768 mMetadataChannel->stop();
1769 }
1770 if (mChannelHandle) {
1771 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1772 mChannelHandle);
1773 LOGD("stopping channel %d", mChannelHandle);
1774 }
1775
1776 pthread_mutex_lock(&mMutex);
1777
1778 // Check state
1779 switch (mState) {
1780 case INITIALIZED:
1781 case CONFIGURED:
1782 case STARTED:
1783 /* valid state */
1784 break;
1785 default:
1786 LOGE("Invalid state %d", mState);
1787 pthread_mutex_unlock(&mMutex);
1788 return -ENODEV;
1789 }
1790
1791 /* Check whether we have video stream */
1792 m_bIs4KVideo = false;
1793 m_bIsVideo = false;
1794 m_bEisSupportedSize = false;
1795 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001796 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001797 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001798 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001799 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001800 uint32_t videoWidth = 0U;
1801 uint32_t videoHeight = 0U;
1802 size_t rawStreamCnt = 0;
1803 size_t stallStreamCnt = 0;
1804 size_t processedStreamCnt = 0;
1805 // Number of streams on ISP encoder path
1806 size_t numStreamsOnEncoder = 0;
1807 size_t numYuv888OnEncoder = 0;
1808 bool bYuv888OverrideJpeg = false;
1809 cam_dimension_t largeYuv888Size = {0, 0};
1810 cam_dimension_t maxViewfinderSize = {0, 0};
1811 bool bJpegExceeds4K = false;
1812 bool bJpegOnEncoder = false;
1813 bool bUseCommonFeatureMask = false;
1814 cam_feature_mask_t commonFeatureMask = 0;
1815 bool bSmallJpegSize = false;
1816 uint32_t width_ratio;
1817 uint32_t height_ratio;
1818 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1819 camera3_stream_t *inputStream = NULL;
1820 bool isJpeg = false;
1821 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001822 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001823 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001824
1825 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1826
1827 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001828 uint8_t eis_prop_set;
1829 uint32_t maxEisWidth = 0;
1830 uint32_t maxEisHeight = 0;
1831
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001832 // Initialize all instant AEC related variables
1833 mInstantAEC = false;
1834 mResetInstantAEC = false;
1835 mInstantAECSettledFrameNumber = 0;
1836 mAecSkipDisplayFrameBound = 0;
1837 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001838 mCurrFeatureState = 0;
1839 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001840
Thierry Strudel3d639192016-09-09 11:52:26 -07001841 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1842
1843 size_t count = IS_TYPE_MAX;
1844 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1845 for (size_t i = 0; i < count; i++) {
1846 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001847 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1848 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001849 break;
1850 }
1851 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001852
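// EIS is considered supported when the capability list advertises EIS 2.0 or 3.0;
// the max EIS dimensions below bound which stream sizes can actually use it.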
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001853 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 maxEisWidth = MAX_EIS_WIDTH;
1855 maxEisHeight = MAX_EIS_HEIGHT;
1856 }
1857
1858 /* EIS setprop control */
1859 char eis_prop[PROPERTY_VALUE_MAX];
1860 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001861 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001862 eis_prop_set = (uint8_t)atoi(eis_prop);
1863
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001864 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001865 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1866
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001867 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1868 m_bEisEnable, eis_prop_set, m_bEisSupported);
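// Note: m_bEisEnable may still be cleared further below for front/aux cameras
// or when no video stream is configured.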
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001869
Thierry Strudel3d639192016-09-09 11:52:26 -07001870 /* stream configurations */
1871 for (size_t i = 0; i < streamList->num_streams; i++) {
1872 camera3_stream_t *newStream = streamList->streams[i];
1873 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1874 "height = %d, rotation = %d, usage = 0x%x",
1875 i, newStream->stream_type, newStream->format,
1876 newStream->width, newStream->height, newStream->rotation,
1877 newStream->usage);
1878 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1879 newStream->stream_type == CAMERA3_STREAM_INPUT){
1880 isZsl = true;
1881 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001882 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1883 IS_USAGE_PREVIEW(newStream->usage)) {
1884 isPreview = true;
1885 }
1886
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1888 inputStream = newStream;
1889 }
1890
Emilian Peev7650c122017-01-19 08:24:33 -08001891 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1892 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001893 isJpeg = true;
1894 jpegSize.width = newStream->width;
1895 jpegSize.height = newStream->height;
1896 if (newStream->width > VIDEO_4K_WIDTH ||
1897 newStream->height > VIDEO_4K_HEIGHT)
1898 bJpegExceeds4K = true;
1899 }
1900
1901 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1902 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1903 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001904 // In HAL3 we can have multiple different video streams.
1905 // The variables videoWidth and videoHeight are used below as
1906 // the dimensions of the largest of them.
1907 if (videoWidth < newStream->width ||
1908 videoHeight < newStream->height) {
1909 videoWidth = newStream->width;
1910 videoHeight = newStream->height;
1911 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001912 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1913 (VIDEO_4K_HEIGHT <= newStream->height)) {
1914 m_bIs4KVideo = true;
1915 }
1916 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1917 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001918
Thierry Strudel3d639192016-09-09 11:52:26 -07001919 }
1920 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1921 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1922 switch (newStream->format) {
1923 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001924 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1925 depthPresent = true;
1926 break;
1927 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001928 stallStreamCnt++;
1929 if (isOnEncoder(maxViewfinderSize, newStream->width,
1930 newStream->height)) {
1931 numStreamsOnEncoder++;
1932 bJpegOnEncoder = true;
1933 }
1934 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1935 newStream->width);
1936 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1937 newStream->height);
1938 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1939 "FATAL: max_downscale_factor cannot be zero and so assert");
1940 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1941 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1942 LOGH("Setting small jpeg size flag to true");
1943 bSmallJpegSize = true;
1944 }
1945 break;
1946 case HAL_PIXEL_FORMAT_RAW10:
1947 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1948 case HAL_PIXEL_FORMAT_RAW16:
1949 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001950 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1951 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1952 pdStatCount++;
1953 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 break;
1955 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1956 processedStreamCnt++;
1957 if (isOnEncoder(maxViewfinderSize, newStream->width,
1958 newStream->height)) {
1959 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1960 !IS_USAGE_ZSL(newStream->usage)) {
1961 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1962 }
1963 numStreamsOnEncoder++;
1964 }
1965 break;
1966 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1967 processedStreamCnt++;
1968 if (isOnEncoder(maxViewfinderSize, newStream->width,
1969 newStream->height)) {
1970 // If the Yuv888 size is not greater than 4K, set the feature mask
1971 // to SUPERSET so that it supports concurrent requests on
1972 // YUV and JPEG.
1973 if (newStream->width <= VIDEO_4K_WIDTH &&
1974 newStream->height <= VIDEO_4K_HEIGHT) {
1975 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1976 }
1977 numStreamsOnEncoder++;
1978 numYuv888OnEncoder++;
1979 largeYuv888Size.width = newStream->width;
1980 largeYuv888Size.height = newStream->height;
1981 }
1982 break;
1983 default:
1984 processedStreamCnt++;
1985 if (isOnEncoder(maxViewfinderSize, newStream->width,
1986 newStream->height)) {
1987 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1988 numStreamsOnEncoder++;
1989 }
1990 break;
1991 }
1992
1993 }
1994 }
1995
1996 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1997 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1998 !m_bIsVideo) {
1999 m_bEisEnable = false;
2000 }
2001
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002002 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2003 pthread_mutex_unlock(&mMutex);
2004 return -EINVAL;
2005 }
2006
Thierry Strudel54dc9782017-02-15 12:12:10 -08002007 uint8_t forceEnableTnr = 0;
2008 char tnr_prop[PROPERTY_VALUE_MAX];
2009 memset(tnr_prop, 0, sizeof(tnr_prop));
2010 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2011 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2012
Thierry Strudel3d639192016-09-09 11:52:26 -07002013 /* Logic to enable/disable TNR based on specific config size/etc.*/
2014 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002015 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2016 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002017 else if (forceEnableTnr)
2018 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002019
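// Video HDR is driven by the persist.camera.hdr.video property and only applies
// to video sessions outside constrained high-speed mode.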
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002020 char videoHdrProp[PROPERTY_VALUE_MAX];
2021 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2022 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2023 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2024
2025 if (hdr_mode_prop == 1 && m_bIsVideo &&
2026 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2027 m_bVideoHdrEnabled = true;
2028 else
2029 m_bVideoHdrEnabled = false;
2030
2031
Thierry Strudel3d639192016-09-09 11:52:26 -07002032 /* Check if num_streams is sane */
2033 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2034 rawStreamCnt > MAX_RAW_STREAMS ||
2035 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2036 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2037 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2038 pthread_mutex_unlock(&mMutex);
2039 return -EINVAL;
2040 }
2041 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002042 if (isZsl && m_bIs4KVideo) {
2043 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002044 pthread_mutex_unlock(&mMutex);
2045 return -EINVAL;
2046 }
2047 /* Check if stream sizes are sane */
2048 if (numStreamsOnEncoder > 2) {
2049 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2050 pthread_mutex_unlock(&mMutex);
2051 return -EINVAL;
2052 } else if (1 < numStreamsOnEncoder){
2053 bUseCommonFeatureMask = true;
2054 LOGH("Multiple streams above max viewfinder size, common mask needed");
2055 }
2056
2057 /* Check if BLOB size is greater than 4k in 4k recording case */
2058 if (m_bIs4KVideo && bJpegExceeds4K) {
2059 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2060 pthread_mutex_unlock(&mMutex);
2061 return -EINVAL;
2062 }
2063
Emilian Peev7650c122017-01-19 08:24:33 -08002064 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2065 depthPresent) {
2066 LOGE("HAL doesn't support depth streams in HFR mode!");
2067 pthread_mutex_unlock(&mMutex);
2068 return -EINVAL;
2069 }
2070
Thierry Strudel3d639192016-09-09 11:52:26 -07002071 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2072 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2073 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2074 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2075 // configurations:
2076 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2077 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2078 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2079 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2080 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2081 __func__);
2082 pthread_mutex_unlock(&mMutex);
2083 return -EINVAL;
2084 }
2085
2086 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2087 // the YUV stream's size is greater or equal to the JPEG size, set common
2088 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2089 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2090 jpegSize.width, jpegSize.height) &&
2091 largeYuv888Size.width > jpegSize.width &&
2092 largeYuv888Size.height > jpegSize.height) {
2093 bYuv888OverrideJpeg = true;
2094 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2095 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2096 }
2097
2098 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2099 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2100 commonFeatureMask);
2101 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2102 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2103
2104 rc = validateStreamDimensions(streamList);
2105 if (rc == NO_ERROR) {
2106 rc = validateStreamRotations(streamList);
2107 }
2108 if (rc != NO_ERROR) {
2109 LOGE("Invalid stream configuration requested!");
2110 pthread_mutex_unlock(&mMutex);
2111 return rc;
2112 }
2113
Emilian Peev0f3c3162017-03-15 12:57:46 +00002114 if (1 < pdStatCount) {
2115 LOGE("HAL doesn't support multiple PD streams");
2116 pthread_mutex_unlock(&mMutex);
2117 return -EINVAL;
2118 }
2119
2120 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2121 (1 == pdStatCount)) {
2122 LOGE("HAL doesn't support PD streams in HFR mode!");
2123 pthread_mutex_unlock(&mMutex);
2124 return -EINVAL;
2125 }
2126
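// Second pass over the stream list: reuse or create stream_info entries and
// identify the ZSL/reprocess stream whose dimensions seed mInputStreamInfo.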
Thierry Strudel3d639192016-09-09 11:52:26 -07002127 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2128 for (size_t i = 0; i < streamList->num_streams; i++) {
2129 camera3_stream_t *newStream = streamList->streams[i];
2130 LOGH("newStream type = %d, stream format = %d "
2131 "stream size : %d x %d, stream rotation = %d",
2132 newStream->stream_type, newStream->format,
2133 newStream->width, newStream->height, newStream->rotation);
2134 // if the stream is already in mStreamInfo, validate it
2135 bool stream_exists = false;
2136 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2137 it != mStreamInfo.end(); it++) {
2138 if ((*it)->stream == newStream) {
2139 QCamera3ProcessingChannel *channel =
2140 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2141 stream_exists = true;
2142 if (channel)
2143 delete channel;
2144 (*it)->status = VALID;
2145 (*it)->stream->priv = NULL;
2146 (*it)->channel = NULL;
2147 }
2148 }
2149 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2150 //new stream
2151 stream_info_t* stream_info;
2152 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2153 if (!stream_info) {
2154 LOGE("Could not allocate stream info");
2155 rc = -ENOMEM;
2156 pthread_mutex_unlock(&mMutex);
2157 return rc;
2158 }
2159 stream_info->stream = newStream;
2160 stream_info->status = VALID;
2161 stream_info->channel = NULL;
2162 mStreamInfo.push_back(stream_info);
2163 }
2164 /* Covers Opaque ZSL and API1 F/W ZSL */
2165 if (IS_USAGE_ZSL(newStream->usage)
2166 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2167 if (zslStream != NULL) {
2168 LOGE("Multiple input/reprocess streams requested!");
2169 pthread_mutex_unlock(&mMutex);
2170 return BAD_VALUE;
2171 }
2172 zslStream = newStream;
2173 }
2174 /* Covers YUV reprocess */
2175 if (inputStream != NULL) {
2176 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2177 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2178 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2179 && inputStream->width == newStream->width
2180 && inputStream->height == newStream->height) {
2181 if (zslStream != NULL) {
2182 /* This scenario indicates multiple YUV streams with the same size
2183 * as the input stream have been requested. Since the zsl stream handle
2184 * is solely used for overriding the size of streams which share
2185 * h/w streams, we will just make a guess here as to which of the
2186 * streams is a ZSL stream. This will be refactored once we have
2187 * generic logic for streams sharing encoder output.
2188 */
2189 LOGH("Warning, Multiple ip/reprocess streams requested!");
2190 }
2191 zslStream = newStream;
2192 }
2193 }
2194 }
2195
2196 /* If a zsl stream is set, we know that we have configured at least one input or
2197 bidirectional stream */
2198 if (NULL != zslStream) {
2199 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2200 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2201 mInputStreamInfo.format = zslStream->format;
2202 mInputStreamInfo.usage = zslStream->usage;
2203 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2204 mInputStreamInfo.dim.width,
2205 mInputStreamInfo.dim.height,
2206 mInputStreamInfo.format, mInputStreamInfo.usage);
2207 }
2208
2209 cleanAndSortStreamInfo();
2210 if (mMetadataChannel) {
2211 delete mMetadataChannel;
2212 mMetadataChannel = NULL;
2213 }
2214 if (mSupportChannel) {
2215 delete mSupportChannel;
2216 mSupportChannel = NULL;
2217 }
2218
2219 if (mAnalysisChannel) {
2220 delete mAnalysisChannel;
2221 mAnalysisChannel = NULL;
2222 }
2223
2224 if (mDummyBatchChannel) {
2225 delete mDummyBatchChannel;
2226 mDummyBatchChannel = NULL;
2227 }
2228
Emilian Peev7650c122017-01-19 08:24:33 -08002229 if (mDepthChannel) {
2230 mDepthChannel = NULL;
2231 }
2232
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002233 mShutterDispatcher.clear();
2234 mOutputBufferDispatcher.clear();
2235
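// persist.camera.is_type selects the image stabilization type; when it resolves
// to IS_TYPE_EIS_3_0, the EIS 3.0 hints and larger video buffer count are used below.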
Thierry Strudel2896d122017-02-23 19:18:03 -08002236 char is_type_value[PROPERTY_VALUE_MAX];
2237 property_get("persist.camera.is_type", is_type_value, "4");
2238 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2239
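// persist.camera.gzoom.at is a bitmask: bit 0 enables Google zoom on the video
// stream, bit 1 on the preview stream; persist.camera.gzoom.4k additionally
// allows it for 4K video.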
Binhao Line406f062017-05-03 14:39:44 -07002240 char property_value[PROPERTY_VALUE_MAX];
2241 property_get("persist.camera.gzoom.at", property_value, "0");
2242 int goog_zoom_at = atoi(property_value);
2243 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2244 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2245
2246 property_get("persist.camera.gzoom.4k", property_value, "0");
2247 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2248
Thierry Strudel3d639192016-09-09 11:52:26 -07002249 //Create metadata channel and initialize it
2250 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2251 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2252 gCamCapability[mCameraId]->color_arrangement);
2253 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2254 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002255 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002256 if (mMetadataChannel == NULL) {
2257 LOGE("failed to allocate metadata channel");
2258 rc = -ENOMEM;
2259 pthread_mutex_unlock(&mMutex);
2260 return rc;
2261 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002262 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002263 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2264 if (rc < 0) {
2265 LOGE("metadata channel initialization failed");
2266 delete mMetadataChannel;
2267 mMetadataChannel = NULL;
2268 pthread_mutex_unlock(&mMutex);
2269 return rc;
2270 }
2271
Thierry Strudel2896d122017-02-23 19:18:03 -08002272 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002273 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002274 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002275 // Keep track of preview/video stream indices.
2276 // There could be more than one preview stream, but only one video stream.
2277 int32_t video_stream_idx = -1;
2278 int32_t preview_stream_idx[streamList->num_streams];
2279 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002280 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2281 /* Allocate channel objects for the requested streams */
2282 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002283
Thierry Strudel3d639192016-09-09 11:52:26 -07002284 camera3_stream_t *newStream = streamList->streams[i];
2285 uint32_t stream_usage = newStream->usage;
2286 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2287 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2288 struct camera_info *p_info = NULL;
2289 pthread_mutex_lock(&gCamLock);
2290 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2291 pthread_mutex_unlock(&gCamLock);
2292 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2293 || IS_USAGE_ZSL(newStream->usage)) &&
2294 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002295 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002296 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002297 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2298 if (bUseCommonFeatureMask)
2299 zsl_ppmask = commonFeatureMask;
2300 else
2301 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002302 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002303 if (numStreamsOnEncoder > 0)
2304 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2305 else
2306 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002307 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002308 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002309 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002310 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 LOGH("Input stream configured, reprocess config");
2312 } else {
2313 // for non-ZSL streams, find out the format
2314 switch (newStream->format) {
2315 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2316 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002317 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002318 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2319 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2320 /* add additional features to pp feature mask */
2321 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2322 mStreamConfigInfo.num_streams);
2323
2324 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2325 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2326 CAM_STREAM_TYPE_VIDEO;
2327 if (m_bTnrEnabled && m_bTnrVideo) {
2328 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2329 CAM_QCOM_FEATURE_CPP_TNR;
2330 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2331 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2332 ~CAM_QCOM_FEATURE_CDS;
2333 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002334 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2336 CAM_QTI_FEATURE_PPEISCORE;
2337 }
Binhao Line406f062017-05-03 14:39:44 -07002338 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2339 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2340 CAM_QCOM_FEATURE_GOOG_ZOOM;
2341 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002342 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002343 } else {
2344 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2345 CAM_STREAM_TYPE_PREVIEW;
2346 if (m_bTnrEnabled && m_bTnrPreview) {
2347 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2348 CAM_QCOM_FEATURE_CPP_TNR;
2349 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2351 ~CAM_QCOM_FEATURE_CDS;
2352 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002353 if(!m_bSwTnrPreview) {
2354 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2355 ~CAM_QTI_FEATURE_SW_TNR;
2356 }
Binhao Line406f062017-05-03 14:39:44 -07002357 if (is_goog_zoom_preview_enabled) {
2358 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2359 CAM_QCOM_FEATURE_GOOG_ZOOM;
2360 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002361 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002362 padding_info.width_padding = mSurfaceStridePadding;
2363 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002364 previewSize.width = (int32_t)newStream->width;
2365 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002366 }
2367 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2368 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2369 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2370 newStream->height;
2371 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2372 newStream->width;
2373 }
2374 }
2375 break;
2376 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002377 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002378 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2379 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2380 if (bUseCommonFeatureMask)
2381 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2382 commonFeatureMask;
2383 else
2384 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2385 CAM_QCOM_FEATURE_NONE;
2386 } else {
2387 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2388 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2389 }
2390 break;
2391 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002392 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2394 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2395 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2396 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2397 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002398 /* Remove rotation if it is not supported
2399 for 4K LiveVideo snapshot case (online processing) */
2400 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2401 CAM_QCOM_FEATURE_ROTATION)) {
2402 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2403 &= ~CAM_QCOM_FEATURE_ROTATION;
2404 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002405 } else {
2406 if (bUseCommonFeatureMask &&
2407 isOnEncoder(maxViewfinderSize, newStream->width,
2408 newStream->height)) {
2409 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2410 } else {
2411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2412 }
2413 }
2414 if (isZsl) {
2415 if (zslStream) {
2416 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2417 (int32_t)zslStream->width;
2418 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2419 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002420 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2421 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002422 } else {
2423 LOGE("Error, No ZSL stream identified");
2424 pthread_mutex_unlock(&mMutex);
2425 return -EINVAL;
2426 }
2427 } else if (m_bIs4KVideo) {
2428 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2429 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2430 } else if (bYuv888OverrideJpeg) {
2431 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2432 (int32_t)largeYuv888Size.width;
2433 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2434 (int32_t)largeYuv888Size.height;
2435 }
2436 break;
2437 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2438 case HAL_PIXEL_FORMAT_RAW16:
2439 case HAL_PIXEL_FORMAT_RAW10:
2440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2441 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2442 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002443 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2444 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2445 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2446 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2447 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2448 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2449 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2450 gCamCapability[mCameraId]->dt[mPDIndex];
2451 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2452 gCamCapability[mCameraId]->vc[mPDIndex];
2453 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002454 break;
2455 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002456 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2459 break;
2460 }
2461 }
2462
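// With the stream type and postprocess mask settled, update PAAF support for
// this stream before constructing its channel.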
2463 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2464 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2465 gCamCapability[mCameraId]->color_arrangement);
2466
2467 if (newStream->priv == NULL) {
2468 //New stream, construct channel
2469 switch (newStream->stream_type) {
2470 case CAMERA3_STREAM_INPUT:
2471 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2472 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2473 break;
2474 case CAMERA3_STREAM_BIDIRECTIONAL:
2475 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2476 GRALLOC_USAGE_HW_CAMERA_WRITE;
2477 break;
2478 case CAMERA3_STREAM_OUTPUT:
2479 /* For the video encoding stream, set the read/write rarely
2480 * flags so that the buffers may be set to uncached */
2481 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2482 newStream->usage |=
2483 (GRALLOC_USAGE_SW_READ_RARELY |
2484 GRALLOC_USAGE_SW_WRITE_RARELY |
2485 GRALLOC_USAGE_HW_CAMERA_WRITE);
2486 else if (IS_USAGE_ZSL(newStream->usage))
2487 {
2488 LOGD("ZSL usage flag skipping");
2489 }
2490 else if (newStream == zslStream
2491 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2492 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2493 } else
2494 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2495 break;
2496 default:
2497 LOGE("Invalid stream_type %d", newStream->stream_type);
2498 break;
2499 }
2500
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002501 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002502 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2503 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2504 QCamera3ProcessingChannel *channel = NULL;
2505 switch (newStream->format) {
2506 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2507 if ((newStream->usage &
2508 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2509 (streamList->operation_mode ==
2510 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2511 ) {
2512 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2513 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002514 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002515 this,
2516 newStream,
2517 (cam_stream_type_t)
2518 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2520 mMetadataChannel,
2521 0); //heap buffers are not required for HFR video channel
2522 if (channel == NULL) {
2523 LOGE("allocation of channel failed");
2524 pthread_mutex_unlock(&mMutex);
2525 return -ENOMEM;
2526 }
2527 // channel->getNumBuffers() will return 0 here, so use
2528 // MAX_INFLIGHT_HFR_REQUESTS
2529 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2530 newStream->priv = channel;
2531 LOGI("num video buffers in HFR mode: %d",
2532 MAX_INFLIGHT_HFR_REQUESTS);
2533 } else {
2534 /* Copy stream contents in the HFR preview-only case to create
2535 * a dummy batch channel so that sensor streaming is in
2536 * HFR mode */
2537 if (!m_bIsVideo && (streamList->operation_mode ==
2538 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2539 mDummyBatchStream = *newStream;
2540 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002541 int bufferCount = MAX_INFLIGHT_REQUESTS;
2542 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2543 CAM_STREAM_TYPE_VIDEO) {
2544 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2545 bufferCount = MAX_VIDEO_BUFFERS;
2546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002547 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2548 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002549 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 this,
2551 newStream,
2552 (cam_stream_type_t)
2553 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2554 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2555 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002556 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002557 if (channel == NULL) {
2558 LOGE("allocation of channel failed");
2559 pthread_mutex_unlock(&mMutex);
2560 return -ENOMEM;
2561 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002562 /* disable UBWC for preview, though supported,
2563 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002564 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002565 (previewSize.width == (int32_t)videoWidth)&&
2566 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002567 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002568 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002569 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002570 /* When goog_zoom is linked to the preview or video stream,
2571 * disable ubwc to the linked stream */
2572 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2573 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2574 channel->setUBWCEnabled(false);
2575 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002576 newStream->max_buffers = channel->getNumBuffers();
2577 newStream->priv = channel;
2578 }
2579 break;
2580 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2581 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2582 mChannelHandle,
2583 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002584 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002585 this,
2586 newStream,
2587 (cam_stream_type_t)
2588 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2589 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2590 mMetadataChannel);
2591 if (channel == NULL) {
2592 LOGE("allocation of YUV channel failed");
2593 pthread_mutex_unlock(&mMutex);
2594 return -ENOMEM;
2595 }
2596 newStream->max_buffers = channel->getNumBuffers();
2597 newStream->priv = channel;
2598 break;
2599 }
2600 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2601 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002602 case HAL_PIXEL_FORMAT_RAW10: {
2603 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2604 (HAL_DATASPACE_DEPTH != newStream->data_space))
2605 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 mRawChannel = new QCamera3RawChannel(
2607 mCameraHandle->camera_handle, mChannelHandle,
2608 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002609 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002610 this, newStream,
2611 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002612 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002613 if (mRawChannel == NULL) {
2614 LOGE("allocation of raw channel failed");
2615 pthread_mutex_unlock(&mMutex);
2616 return -ENOMEM;
2617 }
2618 newStream->max_buffers = mRawChannel->getNumBuffers();
2619 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2620 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002621 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002622 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002623 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2624 mDepthChannel = new QCamera3DepthChannel(
2625 mCameraHandle->camera_handle, mChannelHandle,
2626 mCameraHandle->ops, NULL, NULL, &padding_info,
2627 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2628 mMetadataChannel);
2629 if (NULL == mDepthChannel) {
2630 LOGE("Allocation of depth channel failed");
2631 pthread_mutex_unlock(&mMutex);
2632 return NO_MEMORY;
2633 }
2634 newStream->priv = mDepthChannel;
2635 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2636 } else {
2637 // Max live snapshot inflight buffer is 1. This is to mitigate
2638 // frame drop issues for video snapshot. The more buffers being
2639 // allocated, the more frame drops there are.
2640 mPictureChannel = new QCamera3PicChannel(
2641 mCameraHandle->camera_handle, mChannelHandle,
2642 mCameraHandle->ops, captureResultCb,
2643 setBufferErrorStatus, &padding_info, this, newStream,
2644 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2645 m_bIs4KVideo, isZsl, mMetadataChannel,
2646 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2647 if (mPictureChannel == NULL) {
2648 LOGE("allocation of channel failed");
2649 pthread_mutex_unlock(&mMutex);
2650 return -ENOMEM;
2651 }
2652 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2653 newStream->max_buffers = mPictureChannel->getNumBuffers();
2654 mPictureChannel->overrideYuvSize(
2655 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2656 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002658 break;
2659
2660 default:
2661 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002662 pthread_mutex_unlock(&mMutex);
2663 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002664 }
2665 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2666 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2667 } else {
2668 LOGE("Error, Unknown stream type");
2669 pthread_mutex_unlock(&mMutex);
2670 return -EINVAL;
2671 }
2672
2673 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002674 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2675 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002676 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002677 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2679 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2680 }
2681 }
2682
2683 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2684 it != mStreamInfo.end(); it++) {
2685 if ((*it)->stream == newStream) {
2686 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2687 break;
2688 }
2689 }
2690 } else {
2691 // Channel already exists for this stream
2692 // Do nothing for now
2693 }
2694 padding_info = gCamCapability[mCameraId]->padding_info;
2695
Emilian Peev7650c122017-01-19 08:24:33 -08002696        /* Do not add entries for the input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002697         * since there is no real stream associated with them
2698 */
Emilian Peev7650c122017-01-19 08:24:33 -08002699 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002700 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2701 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002702 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002703 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002704 }
2705
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002706 // Let buffer dispatcher know the configured streams.
2707 mOutputBufferDispatcher.configureStreams(streamList);
2708
Binhao Lincdb362a2017-04-20 13:31:54 -07002709 // By default, preview stream TNR is disabled.
2710 // Enable TNR to the preview stream if all conditions below are satisfied:
2711 // 1. resolution <= 1080p.
2712 // 2. preview resolution == video resolution.
2713 // 3. video stream TNR is enabled.
2714 // 4. EIS2.0
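    // (These checks are applied per preview stream against the single video stream
    // located earlier, as done in the loop below.)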
2715 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2716 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2717 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2718 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2719 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2720 video_stream->width == preview_stream->width &&
2721 video_stream->height == preview_stream->height) {
2722 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2723 CAM_QCOM_FEATURE_CPP_TNR;
2724 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2725 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2726 ~CAM_QCOM_FEATURE_CDS;
2727 }
2728 }
2729
Thierry Strudel2896d122017-02-23 19:18:03 -08002730 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2731 onlyRaw = false;
2732 }
2733
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002734 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002735 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002736 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002737 cam_analysis_info_t analysisInfo;
2738 int32_t ret = NO_ERROR;
2739 ret = mCommon.getAnalysisInfo(
2740 FALSE,
2741 analysisFeatureMask,
2742 &analysisInfo);
2743 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002744 cam_color_filter_arrangement_t analysis_color_arrangement =
2745 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2746 CAM_FILTER_ARRANGEMENT_Y :
2747 gCamCapability[mCameraId]->color_arrangement);
2748 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2749 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002750 cam_dimension_t analysisDim;
2751 analysisDim = mCommon.getMatchingDimension(previewSize,
2752 analysisInfo.analysis_recommended_res);
2753
2754 mAnalysisChannel = new QCamera3SupportChannel(
2755 mCameraHandle->camera_handle,
2756 mChannelHandle,
2757 mCameraHandle->ops,
2758 &analysisInfo.analysis_padding_info,
2759 analysisFeatureMask,
2760 CAM_STREAM_TYPE_ANALYSIS,
2761 &analysisDim,
2762 (analysisInfo.analysis_format
2763 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2764 : CAM_FORMAT_YUV_420_NV21),
2765 analysisInfo.hw_analysis_supported,
2766 gCamCapability[mCameraId]->color_arrangement,
2767 this,
2768 0); // force buffer count to 0
2769 } else {
2770 LOGW("getAnalysisInfo failed, ret = %d", ret);
2771 }
2772 if (!mAnalysisChannel) {
2773 LOGW("Analysis channel cannot be created");
2774 }
2775 }
2776
Thierry Strudel3d639192016-09-09 11:52:26 -07002777 //RAW DUMP channel
2778 if (mEnableRawDump && isRawStreamRequested == false){
2779 cam_dimension_t rawDumpSize;
2780 rawDumpSize = getMaxRawSize(mCameraId);
2781 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2782 setPAAFSupport(rawDumpFeatureMask,
2783 CAM_STREAM_TYPE_RAW,
2784 gCamCapability[mCameraId]->color_arrangement);
2785 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2786 mChannelHandle,
2787 mCameraHandle->ops,
2788 rawDumpSize,
2789 &padding_info,
2790 this, rawDumpFeatureMask);
2791 if (!mRawDumpChannel) {
2792 LOGE("Raw Dump channel cannot be created");
2793 pthread_mutex_unlock(&mMutex);
2794 return -ENOMEM;
2795 }
2796 }
2797
Thierry Strudel3d639192016-09-09 11:52:26 -07002798 if (mAnalysisChannel) {
2799 cam_analysis_info_t analysisInfo;
2800 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2801 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2802 CAM_STREAM_TYPE_ANALYSIS;
2803 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2804 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002805 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002806 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2807 &analysisInfo);
2808 if (rc != NO_ERROR) {
2809 LOGE("getAnalysisInfo failed, ret = %d", rc);
2810 pthread_mutex_unlock(&mMutex);
2811 return rc;
2812 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002813 cam_color_filter_arrangement_t analysis_color_arrangement =
2814 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2815 CAM_FILTER_ARRANGEMENT_Y :
2816 gCamCapability[mCameraId]->color_arrangement);
2817 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2818 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2819 analysis_color_arrangement);
2820
Thierry Strudel3d639192016-09-09 11:52:26 -07002821 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002822 mCommon.getMatchingDimension(previewSize,
2823 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 mStreamConfigInfo.num_streams++;
2825 }
2826
Thierry Strudel2896d122017-02-23 19:18:03 -08002827 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 cam_analysis_info_t supportInfo;
2829 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2830 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2831 setPAAFSupport(callbackFeatureMask,
2832 CAM_STREAM_TYPE_CALLBACK,
2833 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002834 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002835 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002836 if (ret != NO_ERROR) {
2837 /* Ignore the error for Mono camera
2838 * because the PAAF bit mask is only set
2839 * for CAM_STREAM_TYPE_ANALYSIS stream type
2840 */
2841 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2842 LOGW("getAnalysisInfo failed, ret = %d", ret);
2843 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002844 }
2845 mSupportChannel = new QCamera3SupportChannel(
2846 mCameraHandle->camera_handle,
2847 mChannelHandle,
2848 mCameraHandle->ops,
2849 &gCamCapability[mCameraId]->padding_info,
2850 callbackFeatureMask,
2851 CAM_STREAM_TYPE_CALLBACK,
2852 &QCamera3SupportChannel::kDim,
2853 CAM_FORMAT_YUV_420_NV21,
2854 supportInfo.hw_analysis_supported,
2855 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002856 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002857 if (!mSupportChannel) {
2858 LOGE("dummy channel cannot be created");
2859 pthread_mutex_unlock(&mMutex);
2860 return -ENOMEM;
2861 }
2862 }
2863
2864 if (mSupportChannel) {
2865 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2866 QCamera3SupportChannel::kDim;
2867 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2868 CAM_STREAM_TYPE_CALLBACK;
2869 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2870 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2871 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2872 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2873 gCamCapability[mCameraId]->color_arrangement);
2874 mStreamConfigInfo.num_streams++;
2875 }
2876
2877 if (mRawDumpChannel) {
2878 cam_dimension_t rawSize;
2879 rawSize = getMaxRawSize(mCameraId);
2880 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2881 rawSize;
2882 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2883 CAM_STREAM_TYPE_RAW;
2884 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2885 CAM_QCOM_FEATURE_NONE;
2886 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2887 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2888 gCamCapability[mCameraId]->color_arrangement);
2889 mStreamConfigInfo.num_streams++;
2890 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002891
2892 if (mHdrPlusRawSrcChannel) {
2893 cam_dimension_t rawSize;
2894 rawSize = getMaxRawSize(mCameraId);
2895 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2896 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2897 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2898 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2899 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2900 gCamCapability[mCameraId]->color_arrangement);
2901 mStreamConfigInfo.num_streams++;
2902 }
2903
Thierry Strudel3d639192016-09-09 11:52:26 -07002904 /* In HFR mode, if video stream is not added, create a dummy channel so that
2905     * the ISP can run in batch mode even for the preview-only case. This channel is
2906 * never 'start'ed (no stream-on), it is only 'initialized' */
2907 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2908 !m_bIsVideo) {
2909 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2910 setPAAFSupport(dummyFeatureMask,
2911 CAM_STREAM_TYPE_VIDEO,
2912 gCamCapability[mCameraId]->color_arrangement);
2913 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2914 mChannelHandle,
2915 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002916 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002917 this,
2918 &mDummyBatchStream,
2919 CAM_STREAM_TYPE_VIDEO,
2920 dummyFeatureMask,
2921 mMetadataChannel);
2922 if (NULL == mDummyBatchChannel) {
2923 LOGE("creation of mDummyBatchChannel failed."
2924 "Preview will use non-hfr sensor mode ");
2925 }
2926 }
2927 if (mDummyBatchChannel) {
2928 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2929 mDummyBatchStream.width;
2930 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2931 mDummyBatchStream.height;
2932 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2933 CAM_STREAM_TYPE_VIDEO;
2934 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2935 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2937 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2938 gCamCapability[mCameraId]->color_arrangement);
2939 mStreamConfigInfo.num_streams++;
2940 }
2941
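    // Buffer budget reported in the stream config (a reading of the ternary below; the
    // exact backend meaning of 0 for 4K video is presumably "let the backend decide" and
    // is not verified here): EIS 3.0 gets the larger MAX_VIDEO_BUFFERS pool, otherwise
    // the budget is capped at MAX_INFLIGHT_REQUESTS.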
2942 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2943 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002944 m_bIs4KVideo ? 0 :
2945 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002946
2947 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2948 for (pendingRequestIterator i = mPendingRequestsList.begin();
2949 i != mPendingRequestsList.end();) {
2950 i = erasePendingRequest(i);
2951 }
2952 mPendingFrameDropList.clear();
2953 // Initialize/Reset the pending buffers list
2954 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2955 req.mPendingBufferList.clear();
2956 }
2957 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2958
Thierry Strudel3d639192016-09-09 11:52:26 -07002959 mCurJpegMeta.clear();
2960 //Get min frame duration for this streams configuration
2961 deriveMinFrameDuration();
2962
Chien-Yu Chenee335912017-02-09 17:53:20 -08002963 mFirstPreviewIntentSeen = false;
2964
2965     // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002966 {
2967 Mutex::Autolock l(gHdrPlusClientLock);
2968 disableHdrPlusModeLocked();
2969 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002970
Thierry Strudel3d639192016-09-09 11:52:26 -07002971 // Update state
2972 mState = CONFIGURED;
2973
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002974 mFirstMetadataCallback = true;
2975
Thierry Strudel3d639192016-09-09 11:52:26 -07002976 pthread_mutex_unlock(&mMutex);
2977
2978 return rc;
2979}
2980
2981/*===========================================================================
2982 * FUNCTION : validateCaptureRequest
2983 *
2984 * DESCRIPTION: validate a capture request from camera service
2985 *
2986 * PARAMETERS :
2987 * @request : request from framework to process
2988 *
2989 * RETURN :
2990 *
2991 *==========================================================================*/
2992int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002993 camera3_capture_request_t *request,
2994 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002995{
2996 ssize_t idx = 0;
2997 const camera3_stream_buffer_t *b;
2998 CameraMetadata meta;
2999
3000 /* Sanity check the request */
3001 if (request == NULL) {
3002 LOGE("NULL capture request");
3003 return BAD_VALUE;
3004 }
3005
3006 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3007 /*settings cannot be null for the first request*/
3008 return BAD_VALUE;
3009 }
3010
3011 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003012 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3013 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003014        LOGE("Request %d: No output buffers provided!",
3015                frameNumber);
3016 return BAD_VALUE;
3017 }
3018 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3019 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3020 request->num_output_buffers, MAX_NUM_STREAMS);
3021 return BAD_VALUE;
3022 }
3023 if (request->input_buffer != NULL) {
3024 b = request->input_buffer;
3025 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3026 LOGE("Request %d: Buffer %ld: Status not OK!",
3027 frameNumber, (long)idx);
3028 return BAD_VALUE;
3029 }
3030 if (b->release_fence != -1) {
3031 LOGE("Request %d: Buffer %ld: Has a release fence!",
3032 frameNumber, (long)idx);
3033 return BAD_VALUE;
3034 }
3035 if (b->buffer == NULL) {
3036 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3037 frameNumber, (long)idx);
3038 return BAD_VALUE;
3039 }
3040 }
3041
3042 // Validate all buffers
3043 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003044 if (b == NULL) {
3045 return BAD_VALUE;
3046 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003048 QCamera3ProcessingChannel *channel =
3049 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3050 if (channel == NULL) {
3051 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3052 frameNumber, (long)idx);
3053 return BAD_VALUE;
3054 }
3055 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3056 LOGE("Request %d: Buffer %ld: Status not OK!",
3057 frameNumber, (long)idx);
3058 return BAD_VALUE;
3059 }
3060 if (b->release_fence != -1) {
3061 LOGE("Request %d: Buffer %ld: Has a release fence!",
3062 frameNumber, (long)idx);
3063 return BAD_VALUE;
3064 }
3065 if (b->buffer == NULL) {
3066 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3067 frameNumber, (long)idx);
3068 return BAD_VALUE;
3069 }
3070 if (*(b->buffer) == NULL) {
3071 LOGE("Request %d: Buffer %ld: NULL private handle!",
3072 frameNumber, (long)idx);
3073 return BAD_VALUE;
3074 }
3075 idx++;
3076 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003077 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003078 return NO_ERROR;
3079}
3080
3081/*===========================================================================
3082 * FUNCTION : deriveMinFrameDuration
3083 *
3084 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3085 * on currently configured streams.
3086 *
3087 * PARAMETERS : NONE
3088 *
3089 * RETURN : NONE
3090 *
3091 *==========================================================================*/
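// Illustrative sketch (hypothetical numbers, not from a real capability table): with a
// 1920x1080 preview, a 4000x3000 JPEG and no RAW stream configured, the JPEG dimension is
// folded into maxProcessedDim (12MP), the smallest RAW table entry of at least 12MP is
// selected, and the RAW/processed/JPEG minimum durations are then read from the capability
// tables at those resolutions.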
3092void QCamera3HardwareInterface::deriveMinFrameDuration()
3093{
3094 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3095
3096 maxJpegDim = 0;
3097 maxProcessedDim = 0;
3098 maxRawDim = 0;
3099
3100 // Figure out maximum jpeg, processed, and raw dimensions
3101 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3102 it != mStreamInfo.end(); it++) {
3103
3104 // Input stream doesn't have valid stream_type
3105 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3106 continue;
3107
3108 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3109 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3110 if (dimension > maxJpegDim)
3111 maxJpegDim = dimension;
3112 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3113 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3114 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3115 if (dimension > maxRawDim)
3116 maxRawDim = dimension;
3117 } else {
3118 if (dimension > maxProcessedDim)
3119 maxProcessedDim = dimension;
3120 }
3121 }
3122
3123 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3124 MAX_SIZES_CNT);
3125
3126 //Assume all jpeg dimensions are in processed dimensions.
3127 if (maxJpegDim > maxProcessedDim)
3128 maxProcessedDim = maxJpegDim;
3129 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3130 if (maxProcessedDim > maxRawDim) {
3131 maxRawDim = INT32_MAX;
3132
3133 for (size_t i = 0; i < count; i++) {
3134 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3135 gCamCapability[mCameraId]->raw_dim[i].height;
3136 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3137 maxRawDim = dimension;
3138 }
3139 }
3140
3141 //Find minimum durations for processed, jpeg, and raw
3142 for (size_t i = 0; i < count; i++) {
3143 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3144 gCamCapability[mCameraId]->raw_dim[i].height) {
3145 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3146 break;
3147 }
3148 }
3149 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3150 for (size_t i = 0; i < count; i++) {
3151 if (maxProcessedDim ==
3152 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3153 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3154 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3155 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3156 break;
3157 }
3158 }
3159}
3160
3161/*===========================================================================
3162 * FUNCTION : getMinFrameDuration
3163 *
3164 * DESCRIPTION: get the minimum frame duration based on the minimum frame durations
3165 *              of the currently configured streams and the current request configuration.
3166 *
3167 * PARAMETERS : @request: request sent by the framework
3168 *
3169 * RETURN     : minimum frame duration for a particular request
3170 *
3171 *==========================================================================*/
3172int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3173{
3174 bool hasJpegStream = false;
3175 bool hasRawStream = false;
3176 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3177 const camera3_stream_t *stream = request->output_buffers[i].stream;
3178 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3179 hasJpegStream = true;
3180 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3181 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3182 stream->format == HAL_PIXEL_FORMAT_RAW16)
3183 hasRawStream = true;
3184 }
3185
3186 if (!hasJpegStream)
3187 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3188 else
3189 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3190}
3191
3192/*===========================================================================
3193 * FUNCTION : handleBuffersDuringFlushLock
3194 *
3195 * DESCRIPTION: Account for buffers returned from back-end during flush
3196 * This function is executed while mMutex is held by the caller.
3197 *
3198 * PARAMETERS :
3199 * @buffer: image buffer for the callback
3200 *
3201 * RETURN :
3202 *==========================================================================*/
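// In practice this decrements numPendingBufsAtFlush for the matching pending buffer and,
// once the count reaches zero, signals mBuffersCond so that flush() can continue.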
3203void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3204{
3205 bool buffer_found = false;
3206 for (List<PendingBuffersInRequest>::iterator req =
3207 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3208 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3209 for (List<PendingBufferInfo>::iterator i =
3210 req->mPendingBufferList.begin();
3211 i != req->mPendingBufferList.end(); i++) {
3212 if (i->buffer == buffer->buffer) {
3213 mPendingBuffersMap.numPendingBufsAtFlush--;
3214 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3215 buffer->buffer, req->frame_number,
3216 mPendingBuffersMap.numPendingBufsAtFlush);
3217 buffer_found = true;
3218 break;
3219 }
3220 }
3221 if (buffer_found) {
3222 break;
3223 }
3224 }
3225 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3226 //signal the flush()
3227 LOGD("All buffers returned to HAL. Continue flush");
3228 pthread_cond_signal(&mBuffersCond);
3229 }
3230}
3231
Thierry Strudel3d639192016-09-09 11:52:26 -07003232/*===========================================================================
3233 * FUNCTION : handleBatchMetadata
3234 *
3235 * DESCRIPTION: Handles metadata buffer callback in batch mode
3236 *
3237 * PARAMETERS : @metadata_buf: metadata buffer
3238 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3239 * the meta buf in this method
3240 *
3241 * RETURN :
3242 *
3243 *==========================================================================*/
3244void QCamera3HardwareInterface::handleBatchMetadata(
3245 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3246{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003247 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003248
3249 if (NULL == metadata_buf) {
3250 LOGE("metadata_buf is NULL");
3251 return;
3252 }
3253    /* In batch mode, the metadata will contain the frame number and timestamp of
3254     * the last frame in the batch. E.g.: a batch containing buffers from requests
3255     * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3256     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3257 * multiple process_capture_results */
3258 metadata_buffer_t *metadata =
3259 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3260 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3261 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3262 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3263 uint32_t frame_number = 0, urgent_frame_number = 0;
3264 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3265 bool invalid_metadata = false;
3266 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3267 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003268 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003269
3270 int32_t *p_frame_number_valid =
3271 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3272 uint32_t *p_frame_number =
3273 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3274 int64_t *p_capture_time =
3275 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3276 int32_t *p_urgent_frame_number_valid =
3277 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3278 uint32_t *p_urgent_frame_number =
3279 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3280
3281 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3282 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3283 (NULL == p_urgent_frame_number)) {
3284 LOGE("Invalid metadata");
3285 invalid_metadata = true;
3286 } else {
3287 frame_number_valid = *p_frame_number_valid;
3288 last_frame_number = *p_frame_number;
3289 last_frame_capture_time = *p_capture_time;
3290 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3291 last_urgent_frame_number = *p_urgent_frame_number;
3292 }
3293
3294    /* In batch mode, when no video buffers are requested, set_parms are sent
3295 * for every capture_request. The difference between consecutive urgent
3296 * frame numbers and frame numbers should be used to interpolate the
3297 * corresponding frame numbers and time stamps */
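    // Worked example (hypothetical values): if this batch reports last_frame_number 8 and
    // mPendingBatchMap maps it back to first_frame_number 5, frameNumDiff is 4 and the loop
    // below emits metadata for frames 5..8, with inferred timestamps spaced by
    // NSEC_PER_SEC / mHFRVideoFps up to the reported batch capture time.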
3298 pthread_mutex_lock(&mMutex);
3299 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003300 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3301 if(idx < 0) {
3302 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3303 last_urgent_frame_number);
3304 mState = ERROR;
3305 pthread_mutex_unlock(&mMutex);
3306 return;
3307 }
3308 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003309 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3310 first_urgent_frame_number;
3311
3312 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3313 urgent_frame_number_valid,
3314 first_urgent_frame_number, last_urgent_frame_number);
3315 }
3316
3317 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003318 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3319 if(idx < 0) {
3320 LOGE("Invalid frame number received: %d. Irrecoverable error",
3321 last_frame_number);
3322 mState = ERROR;
3323 pthread_mutex_unlock(&mMutex);
3324 return;
3325 }
3326 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003327 frameNumDiff = last_frame_number + 1 -
3328 first_frame_number;
3329 mPendingBatchMap.removeItem(last_frame_number);
3330
3331 LOGD("frm: valid: %d frm_num: %d - %d",
3332 frame_number_valid,
3333 first_frame_number, last_frame_number);
3334
3335 }
3336 pthread_mutex_unlock(&mMutex);
3337
3338 if (urgent_frame_number_valid || frame_number_valid) {
3339 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3340 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3341 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3342 urgentFrameNumDiff, last_urgent_frame_number);
3343 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3344 LOGE("frameNumDiff: %d frameNum: %d",
3345 frameNumDiff, last_frame_number);
3346 }
3347
3348 for (size_t i = 0; i < loopCount; i++) {
3349 /* handleMetadataWithLock is called even for invalid_metadata for
3350 * pipeline depth calculation */
3351 if (!invalid_metadata) {
3352 /* Infer frame number. Batch metadata contains frame number of the
3353 * last frame */
3354 if (urgent_frame_number_valid) {
3355 if (i < urgentFrameNumDiff) {
3356 urgent_frame_number =
3357 first_urgent_frame_number + i;
3358 LOGD("inferred urgent frame_number: %d",
3359 urgent_frame_number);
3360 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3361 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3362 } else {
3363 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3364 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3365 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3366 }
3367 }
3368
3369 /* Infer frame number. Batch metadata contains frame number of the
3370 * last frame */
3371 if (frame_number_valid) {
3372 if (i < frameNumDiff) {
3373 frame_number = first_frame_number + i;
3374 LOGD("inferred frame_number: %d", frame_number);
3375 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3376 CAM_INTF_META_FRAME_NUMBER, frame_number);
3377 } else {
3378 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3379 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3380 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3381 }
3382 }
3383
3384 if (last_frame_capture_time) {
3385 //Infer timestamp
3386 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003387 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003388 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003389 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003390 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3391 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3392 LOGD("batch capture_time: %lld, capture_time: %lld",
3393 last_frame_capture_time, capture_time);
3394 }
3395 }
3396 pthread_mutex_lock(&mMutex);
3397 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003398 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003399 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3400 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003401 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003402 pthread_mutex_unlock(&mMutex);
3403 }
3404
3405 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003406 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003407 mMetadataChannel->bufDone(metadata_buf);
3408 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003409 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003410 }
3411}
3412
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003413void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3414 camera3_error_msg_code_t errorCode)
3415{
3416 camera3_notify_msg_t notify_msg;
3417 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3418 notify_msg.type = CAMERA3_MSG_ERROR;
3419 notify_msg.message.error.error_code = errorCode;
3420 notify_msg.message.error.error_stream = NULL;
3421 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003422 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003423
3424 return;
3425}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003426
3427/*===========================================================================
3428 * FUNCTION : sendPartialMetadataWithLock
3429 *
3430 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3431 *
3432 * PARAMETERS : @metadata: metadata buffer
3433 * @requestIter: The iterator for the pending capture request for
3434 *                  which the partial result is being sent
3435 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3436 * last urgent metadata in a batch. Always true for non-batch mode
3437 *
3438 * RETURN :
3439 *
3440 *==========================================================================*/
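// The partial result carries only the urgent (3A) metadata translated below; the full
// result metadata for the frame is delivered later from handleMetadataWithLock via
// handlePendingResultMetadataWithLock.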
3441
3442void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3443 metadata_buffer_t *metadata,
3444 const pendingRequestIterator requestIter,
3445 bool lastUrgentMetadataInBatch)
3446{
3447 camera3_capture_result_t result;
3448 memset(&result, 0, sizeof(camera3_capture_result_t));
3449
3450 requestIter->partial_result_cnt++;
3451
3452 // Extract 3A metadata
3453 result.result = translateCbUrgentMetadataToResultMetadata(
3454 metadata, lastUrgentMetadataInBatch);
3455 // Populate metadata result
3456 result.frame_number = requestIter->frame_number;
3457 result.num_output_buffers = 0;
3458 result.output_buffers = NULL;
3459 result.partial_result = requestIter->partial_result_cnt;
3460
3461 {
3462 Mutex::Autolock l(gHdrPlusClientLock);
3463 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3464 // Notify HDR+ client about the partial metadata.
3465 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3466 result.partial_result == PARTIAL_RESULT_COUNT);
3467 }
3468 }
3469
3470 orchestrateResult(&result);
3471 LOGD("urgent frame_number = %u", result.frame_number);
3472 free_camera_metadata((camera_metadata_t *)result.result);
3473}
3474
Thierry Strudel3d639192016-09-09 11:52:26 -07003475/*===========================================================================
3476 * FUNCTION : handleMetadataWithLock
3477 *
3478 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3479 *
3480 * PARAMETERS : @metadata_buf: metadata buffer
3481 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3482 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003483 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3484 * last urgent metadata in a batch. Always true for non-batch mode
3485 * @lastMetadataInBatch: Boolean to indicate whether this is the
3486 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003487 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3488 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003489 *
3490 * RETURN :
3491 *
3492 *==========================================================================*/
3493void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003495 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3496 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003497{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003498 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003499 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3500 //during flush do not send metadata from this thread
3501 LOGD("not sending metadata during flush or when mState is error");
3502 if (free_and_bufdone_meta_buf) {
3503 mMetadataChannel->bufDone(metadata_buf);
3504 free(metadata_buf);
3505 }
3506 return;
3507 }
3508
3509 //not in flush
3510 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3511 int32_t frame_number_valid, urgent_frame_number_valid;
3512 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003513 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 nsecs_t currentSysTime;
3515
3516 int32_t *p_frame_number_valid =
3517 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3518 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3519 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003520 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003521 int32_t *p_urgent_frame_number_valid =
3522 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3523 uint32_t *p_urgent_frame_number =
3524 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3525 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3526 metadata) {
3527 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3528 *p_frame_number_valid, *p_frame_number);
3529 }
3530
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003531 camera_metadata_t *resultMetadata = nullptr;
3532
Thierry Strudel3d639192016-09-09 11:52:26 -07003533 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3534 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3535 LOGE("Invalid metadata");
3536 if (free_and_bufdone_meta_buf) {
3537 mMetadataChannel->bufDone(metadata_buf);
3538 free(metadata_buf);
3539 }
3540 goto done_metadata;
3541 }
3542 frame_number_valid = *p_frame_number_valid;
3543 frame_number = *p_frame_number;
3544 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003545 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003546 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3547 urgent_frame_number = *p_urgent_frame_number;
3548 currentSysTime = systemTime(CLOCK_MONOTONIC);
3549
Jason Lee603176d2017-05-31 11:43:27 -07003550 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
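        // Estimate the current BOOTTIME-to-MONOTONIC clock offset by sampling both clocks
        // a few times and keeping the sample with the smallest monotonic gap (least
        // scheduling jitter), then shift the sensor capture time onto the MONOTONIC base.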
3551 const int tries = 3;
3552 nsecs_t bestGap, measured;
3553 for (int i = 0; i < tries; ++i) {
3554 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3555 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3556 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3557 const nsecs_t gap = tmono2 - tmono;
3558 if (i == 0 || gap < bestGap) {
3559 bestGap = gap;
3560 measured = tbase - ((tmono + tmono2) >> 1);
3561 }
3562 }
3563 capture_time -= measured;
3564 }
3565
Thierry Strudel3d639192016-09-09 11:52:26 -07003566 // Detect if buffers from any requests are overdue
3567 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003568 int64_t timeout;
3569 {
3570 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3571 // If there is a pending HDR+ request, the following requests may be blocked until the
3572 // HDR+ request is done. So allow a longer timeout.
3573 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3574 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3575 }
3576
3577 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003578 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003579 assert(missed.stream->priv);
3580 if (missed.stream->priv) {
3581 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3582 assert(ch->mStreams[0]);
3583 if (ch->mStreams[0]) {
3584 LOGE("Cancel missing frame = %d, buffer = %p,"
3585 "stream type = %d, stream format = %d",
3586 req.frame_number, missed.buffer,
3587 ch->mStreams[0]->getMyType(), missed.stream->format);
3588 ch->timeoutFrame(req.frame_number);
3589 }
3590 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003591 }
3592 }
3593 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003594    //For the very first metadata callback, regardless of whether it contains a valid
3595    //frame number, send the partial metadata for the jump-starting requests.
3596    //Note that this has to be done even if the metadata doesn't contain a valid
3597    //urgent frame number, because in the case where only 1 request is ever submitted
3598    //to the HAL, there won't be a subsequent valid urgent frame number.
3599 if (mFirstMetadataCallback) {
3600 for (pendingRequestIterator i =
3601 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3602 if (i->bUseFirstPartial) {
3603 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3604 }
3605 }
3606 mFirstMetadataCallback = false;
3607 }
3608
Thierry Strudel3d639192016-09-09 11:52:26 -07003609 //Partial result on process_capture_result for timestamp
3610 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003611 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003612
3613        //Received an urgent frame number, handle it
3614 //using partial results
3615 for (pendingRequestIterator i =
3616 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3617 LOGD("Iterator Frame = %d urgent frame = %d",
3618 i->frame_number, urgent_frame_number);
3619
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003620 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003621 (i->partial_result_cnt == 0)) {
3622 LOGE("Error: HAL missed urgent metadata for frame number %d",
3623 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003624 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003625 }
3626
3627 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003628 i->partial_result_cnt == 0) {
3629 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003630 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3631 // Instant AEC settled for this frame.
3632 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3633 mInstantAECSettledFrameNumber = urgent_frame_number;
3634 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003635 break;
3636 }
3637 }
3638 }
3639
3640 if (!frame_number_valid) {
3641 LOGD("Not a valid normal frame number, used as SOF only");
3642 if (free_and_bufdone_meta_buf) {
3643 mMetadataChannel->bufDone(metadata_buf);
3644 free(metadata_buf);
3645 }
3646 goto done_metadata;
3647 }
3648 LOGH("valid frame_number = %u, capture_time = %lld",
3649 frame_number, capture_time);
3650
Emilian Peev7650c122017-01-19 08:24:33 -08003651 if (metadata->is_depth_data_valid) {
3652 handleDepthDataLocked(metadata->depth_data, frame_number);
3653 }
3654
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003655    // Check whether any stream buffer corresponding to this frame was dropped.
3656    // If so, send ERROR_BUFFER for the corresponding stream.
3657    // Also, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3658 for (auto & pendingRequest : mPendingRequestsList) {
3659 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3660 mInstantAECSettledFrameNumber)) {
3661 camera3_notify_msg_t notify_msg = {};
3662 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003663 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003664 QCamera3ProcessingChannel *channel =
3665 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003666 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003667 if (p_cam_frame_drop) {
3668 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003669 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003670 // Got the stream ID for drop frame.
3671 dropFrame = true;
3672 break;
3673 }
3674 }
3675 } else {
3676 // This is instant AEC case.
3677                    // For instant AEC, drop the stream until AEC is settled.
3678 dropFrame = true;
3679 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003681 if (dropFrame) {
3682 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3683 if (p_cam_frame_drop) {
3684 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003685 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003686 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003687 } else {
3688 // For instant AEC, inform frame drop and frame number
3689 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3690 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003691 pendingRequest.frame_number, streamID,
3692 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003693 }
3694 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003695 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003696 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003697 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003698 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003699 if (p_cam_frame_drop) {
3700 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003701 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003702 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003703 } else {
3704 // For instant AEC, inform frame drop and frame number
3705 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3706 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003707 pendingRequest.frame_number, streamID,
3708 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003709 }
3710 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003711 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003712 PendingFrameDrop.stream_ID = streamID;
3713 // Add the Frame drop info to mPendingFrameDropList
3714 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003715 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003716 }
3717 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003718 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003719
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003720 for (auto & pendingRequest : mPendingRequestsList) {
3721 // Find the pending request with the frame number.
3722 if (pendingRequest.frame_number == frame_number) {
3723 // Update the sensor timestamp.
3724 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003725
Thierry Strudel3d639192016-09-09 11:52:26 -07003726
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003727 /* Set the timestamp in display metadata so that clients aware of
3728               private_handle, such as VT, can use this unmodified timestamp.
3729 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003730 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003731
Thierry Strudel3d639192016-09-09 11:52:26 -07003732 // Find channel requiring metadata, meaning internal offline postprocess
3733 // is needed.
3734 //TODO: for now, we don't support two streams requiring metadata at the same time.
3735            // (because we are not making copies, and the metadata buffer is not reference counted).
3736 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003737 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3738 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003739 if (iter->need_metadata) {
3740 internalPproc = true;
3741 QCamera3ProcessingChannel *channel =
3742 (QCamera3ProcessingChannel *)iter->stream->priv;
3743 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003744 if(p_is_metabuf_queued != NULL) {
3745 *p_is_metabuf_queued = true;
3746 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003747 break;
3748 }
3749 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003750 for (auto itr = pendingRequest.internalRequestList.begin();
3751 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003752 if (itr->need_metadata) {
3753 internalPproc = true;
3754 QCamera3ProcessingChannel *channel =
3755 (QCamera3ProcessingChannel *)itr->stream->priv;
3756 channel->queueReprocMetadata(metadata_buf);
3757 break;
3758 }
3759 }
3760
Thierry Strudel54dc9782017-02-15 12:12:10 -08003761 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003762
3763 bool *enableZsl = nullptr;
3764 if (gExposeEnableZslKey) {
3765 enableZsl = &pendingRequest.enableZsl;
3766 }
3767
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003768 resultMetadata = translateFromHalMetadata(metadata,
3769 pendingRequest.timestamp, pendingRequest.request_id,
3770 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3771 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003772 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003773 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003774 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003775 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003776 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003777 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003778
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003779 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003780
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003781 if (pendingRequest.blob_request) {
3782 //Dump tuning metadata if enabled and available
3783 char prop[PROPERTY_VALUE_MAX];
3784 memset(prop, 0, sizeof(prop));
3785 property_get("persist.camera.dumpmetadata", prop, "0");
3786 int32_t enabled = atoi(prop);
3787 if (enabled && metadata->is_tuning_params_valid) {
3788 dumpMetadataToFile(metadata->tuning_params,
3789 mMetaFrameCount,
3790 enabled,
3791 "Snapshot",
3792 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003793 }
3794 }
3795
3796 if (!internalPproc) {
3797 LOGD("couldn't find need_metadata for this metadata");
3798 // Return metadata buffer
3799 if (free_and_bufdone_meta_buf) {
3800 mMetadataChannel->bufDone(metadata_buf);
3801 free(metadata_buf);
3802 }
3803 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003804
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003805 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003806 }
3807 }
3808
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003809 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3810
3811 // Try to send out capture result metadata.
3812 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003813 return;
3814
Thierry Strudel3d639192016-09-09 11:52:26 -07003815done_metadata:
3816 for (pendingRequestIterator i = mPendingRequestsList.begin();
3817 i != mPendingRequestsList.end() ;i++) {
3818 i->pipeline_depth++;
3819 }
3820 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3821 unblockRequestIfNecessary();
3822}
3823
3824/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003825 * FUNCTION   : handleDepthDataLocked
3826 *
3827 * DESCRIPTION: Handles incoming depth data
3828 *
3829 * PARAMETERS : @depthData : Depth data
3830 * @frameNumber: Frame number of the incoming depth data
3831 *
3832 * RETURN :
3833 *
3834 *==========================================================================*/
3835void QCamera3HardwareInterface::handleDepthDataLocked(
3836 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3837 uint32_t currentFrameNumber;
3838 buffer_handle_t *depthBuffer;
3839
3840 if (nullptr == mDepthChannel) {
3841 LOGE("Depth channel not present!");
3842 return;
3843 }
3844
3845 camera3_stream_buffer_t resultBuffer =
3846 {.acquire_fence = -1,
3847 .release_fence = -1,
3848 .status = CAMERA3_BUFFER_STATUS_OK,
3849 .buffer = nullptr,
3850 .stream = mDepthChannel->getStream()};
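    // Drain the queued depth buffers in order: frames older than the incoming one are
    // returned as buffer errors, the matching frame is populated with the new depth data,
    // and anything newer is left queued for a later callback.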
Emilian Peev7650c122017-01-19 08:24:33 -08003851 do {
3852 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3853 if (nullptr == depthBuffer) {
3854 break;
3855 }
3856
Emilian Peev7650c122017-01-19 08:24:33 -08003857 resultBuffer.buffer = depthBuffer;
3858 if (currentFrameNumber == frameNumber) {
3859 int32_t rc = mDepthChannel->populateDepthData(depthData,
3860 frameNumber);
3861 if (NO_ERROR != rc) {
3862 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3863 } else {
3864 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3865 }
3866 } else if (currentFrameNumber > frameNumber) {
3867 break;
3868 } else {
3869 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3870 {{currentFrameNumber, mDepthChannel->getStream(),
3871 CAMERA3_MSG_ERROR_BUFFER}}};
3872 orchestrateNotify(&notify_msg);
3873
3874 LOGE("Depth buffer for frame number: %d is missing "
3875 "returning back!", currentFrameNumber);
3876 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3877 }
3878 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003879 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003880 } while (currentFrameNumber < frameNumber);
3881}
3882
3883/*===========================================================================
3884 * FUNCTION : notifyErrorFoPendingDepthData
3885 *
3886 * DESCRIPTION: Returns error for any pending depth buffers
3887 *
3888 * PARAMETERS : depthCh - depth channel that needs to get flushed
3889 *
3890 * RETURN :
3891 *
3892 *==========================================================================*/
3893void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3894 QCamera3DepthChannel *depthCh) {
3895 uint32_t currentFrameNumber;
3896 buffer_handle_t *depthBuffer;
3897
3898 if (nullptr == depthCh) {
3899 return;
3900 }
3901
3902 camera3_notify_msg_t notify_msg =
3903 {.type = CAMERA3_MSG_ERROR,
3904 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3905 camera3_stream_buffer_t resultBuffer =
3906 {.acquire_fence = -1,
3907 .release_fence = -1,
3908 .buffer = nullptr,
3909 .stream = depthCh->getStream(),
3910 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003911
3912 while (nullptr !=
3913 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3914 depthCh->unmapBuffer(currentFrameNumber);
3915
3916 notify_msg.message.error.frame_number = currentFrameNumber;
3917 orchestrateNotify(&notify_msg);
3918
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003919 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003920 };
3921}
3922
3923/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003924 * FUNCTION : hdrPlusPerfLock
3925 *
3926 * DESCRIPTION: perf lock for HDR+ using custom intent
3927 *
3928 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3929 *
3930 * RETURN : None
3931 *
3932 *==========================================================================*/
3933void QCamera3HardwareInterface::hdrPlusPerfLock(
3934 mm_camera_super_buf_t *metadata_buf)
3935{
3936 if (NULL == metadata_buf) {
3937 LOGE("metadata_buf is NULL");
3938 return;
3939 }
3940 metadata_buffer_t *metadata =
3941 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3942 int32_t *p_frame_number_valid =
3943 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3944 uint32_t *p_frame_number =
3945 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3946
3947 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3948 LOGE("%s: Invalid metadata", __func__);
3949 return;
3950 }
3951
3952 //acquire perf lock for HDR_PLUS_PERF_TIME_OUT (5 sec) after the last HDR frame is captured
3953 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3954 if ((p_frame_number != NULL) &&
3955 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003956 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003957 }
3958 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003959}
3960
3961/*===========================================================================
3962 * FUNCTION : handleInputBufferWithLock
3963 *
3964 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3965 *
3966 * PARAMETERS : @frame_number: frame number of the input buffer
3967 *
3968 * RETURN :
3969 *
3970 *==========================================================================*/
3971void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3972{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003973 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003974 pendingRequestIterator i = mPendingRequestsList.begin();
3975 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3976 i++;
3977 }
3978 if (i != mPendingRequestsList.end() && i->input_buffer) {
3979 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003980 CameraMetadata settings;
3981 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3982 if(i->settings) {
3983 settings = i->settings;
3984 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3985 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07003986 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003987 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07003988 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003989 } else {
3990 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07003991 }
3992
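        // Mark the shutter ready so the shutter dispatcher can deliver the
        // shutter notification for this frame with the capture time derived above.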
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003993 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3994 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3995 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07003996
3997 camera3_capture_result result;
3998 memset(&result, 0, sizeof(camera3_capture_result));
3999 result.frame_number = frame_number;
4000 result.result = i->settings;
4001 result.input_buffer = i->input_buffer;
4002 result.partial_result = PARTIAL_RESULT_COUNT;
4003
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004004 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004005 LOGD("Input request metadata and input buffer frame_number = %u",
4006 i->frame_number);
4007 i = erasePendingRequest(i);
4008 } else {
4009 LOGE("Could not find input request for frame number %d", frame_number);
4010 }
4011}
4012
4013/*===========================================================================
4014 * FUNCTION : handleBufferWithLock
4015 *
4016 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4017 *
4018 * PARAMETERS : @buffer: image buffer for the callback
4019 * @frame_number: frame number of the image buffer
4020 *
4021 * RETURN :
4022 *
4023 *==========================================================================*/
4024void QCamera3HardwareInterface::handleBufferWithLock(
4025 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4026{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004027 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004028
4029 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4030 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4031 }
4032
Thierry Strudel3d639192016-09-09 11:52:26 -07004033 /* Nothing to be done during error state */
4034 if ((ERROR == mState) || (DEINIT == mState)) {
4035 return;
4036 }
4037 if (mFlushPerf) {
4038 handleBuffersDuringFlushLock(buffer);
4039 return;
4040 }
4041 //not in flush
4042 // Look up the pending request for this frame number. For a reprocess
4043 // request (one with an input buffer), result metadata can be sent out now;
4044 // the output buffer itself is always handed to the buffer dispatcher below.
4045 pendingRequestIterator i = mPendingRequestsList.begin();
4046 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4047 i++;
4048 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004049
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004050 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004051 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004052 // For a reprocessing request, try to send out result metadata.
4053 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004054 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004055 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004056
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004057 // Check if this frame was dropped.
4058 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4059 m != mPendingFrameDropList.end(); m++) {
4060 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4061 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4062 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4063 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4064 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4065 frame_number, streamID);
4066 m = mPendingFrameDropList.erase(m);
4067 break;
4068 }
4069 }
4070
4071 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4072 LOGH("result frame_number = %d, buffer = %p",
4073 frame_number, buffer->buffer);
4074
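    // Remove the buffer from pending-buffer tracking and hand it to the output
    // buffer dispatcher, which takes care of returning output buffers to the
    // framework in order.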
4075 mPendingBuffersMap.removeBuf(buffer->buffer);
4076 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4077
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004078 if (mPreviewStarted == false) {
4079 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4080 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004081 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4082
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004083 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4084 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4085 mPreviewStarted = true;
4086
4087 // Set power hint for preview
4088 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4089 }
4090 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004091}
4092
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004093void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004094 const camera_metadata_t *resultMetadata)
4095{
4096 // Find the pending request for this result metadata.
4097 auto requestIter = mPendingRequestsList.begin();
4098 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4099 requestIter++;
4100 }
4101
4102 if (requestIter == mPendingRequestsList.end()) {
4103 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4104 return;
4105 }
4106
4107 // Update the result metadata
4108 requestIter->resultMetadata = resultMetadata;
4109
4110 // Check what type of request this is.
4111 bool liveRequest = false;
4112 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004113 // HDR+ request doesn't have partial results.
4114 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004115 } else if (requestIter->input_buffer != nullptr) {
4116 // Reprocessing request result is the same as settings.
4117 requestIter->resultMetadata = requestIter->settings;
4118 // Reprocessing request doesn't have partial results.
4119 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4120 } else {
4121 liveRequest = true;
4122 requestIter->partial_result_cnt++;
4123 mPendingLiveRequest--;
4124
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004125 {
4126 Mutex::Autolock l(gHdrPlusClientLock);
4127 // For a live request, send the metadata to HDR+ client.
4128 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4129 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4130 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4131 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004132 }
4133 }
4134
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004135 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4136 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004137 bool readyToSend = true;
4138
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004139 // Iterate through the pending requests to send out result metadata that are ready. Also if
4140 // this result metadata belongs to a live request, notify errors for previous live requests
4141 // that don't have result metadata yet.
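    // e.g. when the result for frame N arrives while an older live frame M (< N)
    // is still missing its metadata, frame M is completed with an ERROR_RESULT
    // and frame N is held back until every earlier pending request has been
    // sent, keeping shutter callbacks and result metadata in order.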
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004142 auto iter = mPendingRequestsList.begin();
4143 while (iter != mPendingRequestsList.end()) {
4144 // Check if current pending request is ready. If it's not ready, the following pending
4145 // requests are also not ready.
4146 if (readyToSend && iter->resultMetadata == nullptr) {
4147 readyToSend = false;
4148 }
4149
4150 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4151
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004152 camera3_capture_result_t result = {};
4153 result.frame_number = iter->frame_number;
4154 result.result = iter->resultMetadata;
4155 result.partial_result = iter->partial_result_cnt;
4156
4157 // If this pending buffer has result metadata, we may be able to send out shutter callback
4158 // and result metadata.
4159 if (iter->resultMetadata != nullptr) {
4160 if (!readyToSend) {
4161 // If any of the previous pending request is not ready, this pending request is
4162 // also not ready to send in order to keep shutter callbacks and result metadata
4163 // in order.
4164 iter++;
4165 continue;
4166 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004167 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4168 // If the result metadata belongs to a live request, notify errors for previous pending
4169 // live requests.
4170 mPendingLiveRequest--;
4171
4172 CameraMetadata dummyMetadata;
4173 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4174 result.result = dummyMetadata.release();
4175
4176 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004177
4178 // partial_result should be PARTIAL_RESULT_CNT in case of
4179 // ERROR_RESULT.
4180 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4181 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004182 } else {
4183 iter++;
4184 continue;
4185 }
4186
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004187 result.output_buffers = nullptr;
4188 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004189 orchestrateResult(&result);
4190
4191 // For reprocessing, result metadata is the same as settings so do not free it here to
4192 // avoid double free.
4193 if (result.result != iter->settings) {
4194 free_camera_metadata((camera_metadata_t *)result.result);
4195 }
4196 iter->resultMetadata = nullptr;
4197 iter = erasePendingRequest(iter);
4198 }
4199
4200 if (liveRequest) {
4201 for (auto &iter : mPendingRequestsList) {
4202 // Increment pipeline depth for the following pending requests.
4203 if (iter.frame_number > frameNumber) {
4204 iter.pipeline_depth++;
4205 }
4206 }
4207 }
4208
4209 unblockRequestIfNecessary();
4210}
4211
Thierry Strudel3d639192016-09-09 11:52:26 -07004212/*===========================================================================
4213 * FUNCTION : unblockRequestIfNecessary
4214 *
4215 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4216 * that mMutex is held when this function is called.
4217 *
4218 * PARAMETERS :
4219 *
4220 * RETURN :
4221 *
4222 *==========================================================================*/
4223void QCamera3HardwareInterface::unblockRequestIfNecessary()
4224{
4225 // Unblock process_capture_request
4226 pthread_cond_signal(&mRequestCond);
4227}
4228
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004229/*===========================================================================
4230 * FUNCTION : isHdrSnapshotRequest
4231 *
4232 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4233 *
4234 * PARAMETERS : camera3 request structure
4235 *
4236 * RETURN : boolean decision variable
4237 *
4238 *==========================================================================*/
4239bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4240{
4241 if (request == NULL) {
4242 LOGE("Invalid request handle");
4243 assert(0);
4244 return false;
4245 }
4246
4247 if (!mForceHdrSnapshot) {
4248 CameraMetadata frame_settings;
4249 frame_settings = request->settings;
4250
4251 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4252 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4253 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4254 return false;
4255 }
4256 } else {
4257 return false;
4258 }
4259
4260 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4261 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4262 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4263 return false;
4264 }
4265 } else {
4266 return false;
4267 }
4268 }
4269
4270 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4271 if (request->output_buffers[i].stream->format
4272 == HAL_PIXEL_FORMAT_BLOB) {
4273 return true;
4274 }
4275 }
4276
4277 return false;
4278}
4279/*===========================================================================
4280 * FUNCTION : orchestrateRequest
4281 *
4282 * DESCRIPTION: Orchestrates a capture request from camera service
4283 *
4284 * PARAMETERS :
4285 * @request : request from framework to process
4286 *
4287 * RETURN : Error status codes
4288 *
4289 *==========================================================================*/
4290int32_t QCamera3HardwareInterface::orchestrateRequest(
4291 camera3_capture_request_t *request)
4292{
4293
4294 uint32_t originalFrameNumber = request->frame_number;
4295 uint32_t originalOutputCount = request->num_output_buffers;
4296 const camera_metadata_t *original_settings = request->settings;
4297 List<InternalRequest> internallyRequestedStreams;
4298 List<InternalRequest> emptyInternalList;
4299
4300 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4301 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4302 uint32_t internalFrameNumber;
4303 CameraMetadata modified_meta;
4304
4305
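        /* The HDR snapshot is expanded into a bracketed sequence of internal
         * requests with AE locked: a metering-only settling frame plus the
         * framework-visible capture at the -2x EV step, then settling and
         * capture pairs at 0 EV and at the 2x EV step. Extra requests get
         * internally generated frame numbers, so only the original framework
         * frame number is ever reported back to the framework. */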
4306 /* Add Blob channel to list of internally requested streams */
4307 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4308 if (request->output_buffers[i].stream->format
4309 == HAL_PIXEL_FORMAT_BLOB) {
4310 InternalRequest streamRequested;
4311 streamRequested.meteringOnly = 1;
4312 streamRequested.need_metadata = 0;
4313 streamRequested.stream = request->output_buffers[i].stream;
4314 internallyRequestedStreams.push_back(streamRequested);
4315 }
4316 }
4317 request->num_output_buffers = 0;
4318 auto itr = internallyRequestedStreams.begin();
4319
4320 /* Modify setting to set compensation */
4321 modified_meta = request->settings;
4322 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4323 uint8_t aeLock = 1;
4324 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4325 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4326 camera_metadata_t *modified_settings = modified_meta.release();
4327 request->settings = modified_settings;
4328
4329 /* Capture Settling & -2x frame */
4330 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4331 request->frame_number = internalFrameNumber;
4332 processCaptureRequest(request, internallyRequestedStreams);
4333
4334 request->num_output_buffers = originalOutputCount;
4335 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4336 request->frame_number = internalFrameNumber;
4337 processCaptureRequest(request, emptyInternalList);
4338 request->num_output_buffers = 0;
4339
4340 modified_meta = modified_settings;
4341 expCompensation = 0;
4342 aeLock = 1;
4343 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4344 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4345 modified_settings = modified_meta.release();
4346 request->settings = modified_settings;
4347
4348 /* Capture Settling & 0X frame */
4349
4350 itr = internallyRequestedStreams.begin();
4351 if (itr == internallyRequestedStreams.end()) {
4352 LOGE("Error Internally Requested Stream list is empty");
4353 assert(0);
4354 } else {
4355 itr->need_metadata = 0;
4356 itr->meteringOnly = 1;
4357 }
4358
4359 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4360 request->frame_number = internalFrameNumber;
4361 processCaptureRequest(request, internallyRequestedStreams);
4362
4363 itr = internallyRequestedStreams.begin();
4364 if (itr == internallyRequestedStreams.end()) {
4365 ALOGE("Error Internally Requested Stream list is empty");
4366 assert(0);
4367 } else {
4368 itr->need_metadata = 1;
4369 itr->meteringOnly = 0;
4370 }
4371
4372 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4373 request->frame_number = internalFrameNumber;
4374 processCaptureRequest(request, internallyRequestedStreams);
4375
4376 /* Capture 2X frame*/
4377 modified_meta = modified_settings;
4378 expCompensation = GB_HDR_2X_STEP_EV;
4379 aeLock = 1;
4380 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4381 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4382 modified_settings = modified_meta.release();
4383 request->settings = modified_settings;
4384
4385 itr = internallyRequestedStreams.begin();
4386 if (itr == internallyRequestedStreams.end()) {
4387 ALOGE("Error Internally Requested Stream list is empty");
4388 assert(0);
4389 } else {
4390 itr->need_metadata = 0;
4391 itr->meteringOnly = 1;
4392 }
4393 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4394 request->frame_number = internalFrameNumber;
4395 processCaptureRequest(request, internallyRequestedStreams);
4396
4397 itr = internallyRequestedStreams.begin();
4398 if (itr == internallyRequestedStreams.end()) {
4399 ALOGE("Error Internally Requested Stream list is empty");
4400 assert(0);
4401 } else {
4402 itr->need_metadata = 1;
4403 itr->meteringOnly = 0;
4404 }
4405
4406 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4407 request->frame_number = internalFrameNumber;
4408 processCaptureRequest(request, internallyRequestedStreams);
4409
4410
4411 /* Capture 2X on original streaming config*/
4412 internallyRequestedStreams.clear();
4413
4414 /* Restore original settings pointer */
4415 request->settings = original_settings;
4416 } else {
4417 uint32_t internalFrameNumber;
4418 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4419 request->frame_number = internalFrameNumber;
4420 return processCaptureRequest(request, internallyRequestedStreams);
4421 }
4422
4423 return NO_ERROR;
4424}
4425
4426/*===========================================================================
4427 * FUNCTION : orchestrateResult
4428 *
4429 * DESCRIPTION: Orchestrates a capture result to camera service
4430 *
4431 * PARAMETERS :
4432 * @result : capture result to be sent back to the framework
4433 *
4434 * RETURN :
4435 *
4436 *==========================================================================*/
4437void QCamera3HardwareInterface::orchestrateResult(
4438 camera3_capture_result_t *result)
4439{
4440 uint32_t frameworkFrameNumber;
4441 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4442 frameworkFrameNumber);
4443 if (rc != NO_ERROR) {
4444 LOGE("Cannot find translated frameworkFrameNumber");
4445 assert(0);
4446 } else {
4447 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004448 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004449 } else {
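            // If the result metadata carries ANDROID_SYNC_FRAME_NUMBER, rewrite
            // that entry to the translated framework frame number before the
            // result is forwarded.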
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004450 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004451 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4452 camera_metadata_entry_t entry;
4453 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4454 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004455 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004456 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4457 if (ret != OK)
4458 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004459 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004460 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004461 result->frame_number = frameworkFrameNumber;
4462 mCallbackOps->process_capture_result(mCallbackOps, result);
4463 }
4464 }
4465}
4466
4467/*===========================================================================
4468 * FUNCTION : orchestrateNotify
4469 *
4470 * DESCRIPTION: Orchestrates a notify to camera service
4471 *
4472 * PARAMETERS :
4473 * @notify_msg : notify message to be sent back to the framework
4474 *
4475 * RETURN :
4476 *
4477 *==========================================================================*/
4478void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4479{
4480 uint32_t frameworkFrameNumber;
4481 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004482 int32_t rc = NO_ERROR;
4483
4484 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004485 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004486
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004487 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004488 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4489 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4490 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004491 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004492 LOGE("Cannot find translated frameworkFrameNumber");
4493 assert(0);
4494 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004495 }
4496 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004497
4498 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4499 LOGD("Internal Request drop the notifyCb");
4500 } else {
4501 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4502 mCallbackOps->notify(mCallbackOps, notify_msg);
4503 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004504}
4505
4506/*===========================================================================
4507 * FUNCTION : FrameNumberRegistry
4508 *
4509 * DESCRIPTION: Constructor
4510 *
4511 * PARAMETERS :
4512 *
4513 * RETURN :
4514 *
4515 *==========================================================================*/
4516FrameNumberRegistry::FrameNumberRegistry()
4517{
4518 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4519}
4520
4521/*===========================================================================
4522 * FUNCTION : ~FrameNumberRegistry
4523 *
4524 * DESCRIPTION: Destructor
4525 *
4526 * PARAMETERS :
4527 *
4528 * RETURN :
4529 *
4530 *==========================================================================*/
4531FrameNumberRegistry::~FrameNumberRegistry()
4532{
4533}
4534
4535/*===========================================================================
4536 * FUNCTION : purgeOldEntriesLocked
4537 *
4538 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4539 *
4540 * PARAMETERS :
4541 *
4542 * RETURN : NONE
4543 *
4544 *==========================================================================*/
4545void FrameNumberRegistry::purgeOldEntriesLocked()
4546{
4547 while (_register.begin() != _register.end()) {
4548 auto itr = _register.begin();
4549 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4550 _register.erase(itr);
4551 } else {
4552 return;
4553 }
4554 }
4555}
4556
4557/*===========================================================================
4558 * FUNCTION : allocStoreInternalFrameNumber
4559 *
4560 * DESCRIPTION: Method to record a framework request and associate a newly
4561 * generated internal frame number with it
4562 *
4563 * PARAMETERS :
4564 * @frameworkFrameNumber: Identifier given by the framework
4565 * @internalFrameNumber : Output parameter which will hold the newly
4566 * generated internal frame number
4567 *
4568 * RETURN : Error code
4569 *
4570 *==========================================================================*/
4571int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4572 uint32_t &internalFrameNumber)
4573{
4574 Mutex::Autolock lock(mRegistryLock);
4575 internalFrameNumber = _nextFreeInternalNumber++;
4576 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4577 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4578 purgeOldEntriesLocked();
4579 return NO_ERROR;
4580}
4581
4582/*===========================================================================
4583 * FUNCTION : generateStoreInternalFrameNumber
4584 *
4585 * DESCRIPTION: Method to generate a new internal frame number that is not
4586 * associated with any framework request
4587 *
4588 * PARAMETERS :
4589 * @internalFrameNumber: Output parameter which will hold the newly generated
4590 * internal frame number
4591 *
4592 * RETURN : Error code
4593 *
4594 *==========================================================================*/
4595int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4596{
4597 Mutex::Autolock lock(mRegistryLock);
4598 internalFrameNumber = _nextFreeInternalNumber++;
4599 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4600 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4601 purgeOldEntriesLocked();
4602 return NO_ERROR;
4603}
4604
4605/*===========================================================================
4606 * FUNCTION : getFrameworkFrameNumber
4607 *
4608 * DESCRIPTION: Method to query the framework frame number given an internal one
4609 *
4610 * PARAMETERS :
4611 * @internalFrameNumber: Internal frame number to look up
4612 * @frameworkFrameNumber: Output parameter holding the framework frame number
4613 *
4614 * RETURN : Error code
4615 *
4616 *==========================================================================*/
4617int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4618 uint32_t &frameworkFrameNumber)
4619{
4620 Mutex::Autolock lock(mRegistryLock);
4621 auto itr = _register.find(internalFrameNumber);
4622 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004623 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004624 return -ENOENT;
4625 }
4626
4627 frameworkFrameNumber = itr->second;
4628 purgeOldEntriesLocked();
4629 return NO_ERROR;
4630}
Thierry Strudel3d639192016-09-09 11:52:26 -07004631
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004632status_t QCamera3HardwareInterface::fillPbStreamConfig(
4633 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4634 QCamera3Channel *channel, uint32_t streamIndex) {
4635 if (config == nullptr) {
4636 LOGE("%s: config is null", __FUNCTION__);
4637 return BAD_VALUE;
4638 }
4639
4640 if (channel == nullptr) {
4641 LOGE("%s: channel is null", __FUNCTION__);
4642 return BAD_VALUE;
4643 }
4644
4645 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4646 if (stream == nullptr) {
4647 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4648 return NAME_NOT_FOUND;
4649 }
4650
4651 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4652 if (streamInfo == nullptr) {
4653 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4654 return NAME_NOT_FOUND;
4655 }
4656
4657 config->id = pbStreamId;
4658 config->image.width = streamInfo->dim.width;
4659 config->image.height = streamInfo->dim.height;
4660 config->image.padding = 0;
4661 config->image.format = pbStreamFormat;
4662
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004663 uint32_t totalPlaneSize = 0;
4664
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004665 // Fill plane information.
4666 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4667 pbcamera::PlaneConfiguration plane;
4668 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4669 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4670 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004671
4672 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004673 }
4674
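    // Whatever the frame length covers beyond the summed stride * scanline plane
    // sizes (e.g. alignment bytes) is reported as padding.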
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004675 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004676 return OK;
4677}
4678
Thierry Strudel3d639192016-09-09 11:52:26 -07004679/*===========================================================================
4680 * FUNCTION : processCaptureRequest
4681 *
4682 * DESCRIPTION: process a capture request from camera service
4683 *
4684 * PARAMETERS :
4685 * @request : request from framework to process
4686 *
4687 * RETURN :
4688 *
4689 *==========================================================================*/
4690int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004691 camera3_capture_request_t *request,
4692 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004693{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004694 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004695 int rc = NO_ERROR;
4696 int32_t request_id;
4697 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004698 bool isVidBufRequested = false;
4699 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004700 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004701
4702 pthread_mutex_lock(&mMutex);
4703
4704 // Validate current state
4705 switch (mState) {
4706 case CONFIGURED:
4707 case STARTED:
4708 /* valid state */
4709 break;
4710
4711 case ERROR:
4712 pthread_mutex_unlock(&mMutex);
4713 handleCameraDeviceError();
4714 return -ENODEV;
4715
4716 default:
4717 LOGE("Invalid state %d", mState);
4718 pthread_mutex_unlock(&mMutex);
4719 return -ENODEV;
4720 }
4721
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004722 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004723 if (rc != NO_ERROR) {
4724 LOGE("incoming request is not valid");
4725 pthread_mutex_unlock(&mMutex);
4726 return rc;
4727 }
4728
4729 meta = request->settings;
4730
4731 // For first capture request, send capture intent, and
4732 // stream on all streams
4733 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004734 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004735 // send an unconfigure to the backend so that the isp
4736 // resources are deallocated
4737 if (!mFirstConfiguration) {
4738 cam_stream_size_info_t stream_config_info;
4739 int32_t hal_version = CAM_HAL_V3;
4740 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4741 stream_config_info.buffer_info.min_buffers =
4742 MIN_INFLIGHT_REQUESTS;
4743 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004744 m_bIs4KVideo ? 0 :
4745 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004746 clear_metadata_buffer(mParameters);
4747 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4748 CAM_INTF_PARM_HAL_VERSION, hal_version);
4749 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4750 CAM_INTF_META_STREAM_INFO, stream_config_info);
4751 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4752 mParameters);
4753 if (rc < 0) {
4754 LOGE("set_parms for unconfigure failed");
4755 pthread_mutex_unlock(&mMutex);
4756 return rc;
4757 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004758
Thierry Strudel3d639192016-09-09 11:52:26 -07004759 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004760 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004761 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004762 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004764 property_get("persist.camera.is_type", is_type_value, "4");
4765 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4766 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4767 property_get("persist.camera.is_type_preview", is_type_value, "4");
4768 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4769 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004770
4771 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4772 int32_t hal_version = CAM_HAL_V3;
4773 uint8_t captureIntent =
4774 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4775 mCaptureIntent = captureIntent;
4776 clear_metadata_buffer(mParameters);
4777 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4778 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4779 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004780 if (mFirstConfiguration) {
4781 // configure instant AEC
4782 // Instant AEC is a session based parameter and it is needed only
4783 // once per complete session after open camera.
4784 // i.e. This is set only once for the first capture request, after open camera.
4785 setInstantAEC(meta);
4786 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004787 uint8_t fwkVideoStabMode=0;
4788 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4789 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4790 }
4791
Xue Tuecac74e2017-04-17 13:58:15 -07004792 // If EIS setprop is enabled then only turn it on for video/preview
4793 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004794 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 int32_t vsMode;
4796 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4798 rc = BAD_VALUE;
4799 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004800 LOGD("setEis %d", setEis);
4801 bool eis3Supported = false;
4802 size_t count = IS_TYPE_MAX;
4803 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4804 for (size_t i = 0; i < count; i++) {
4805 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4806 eis3Supported = true;
4807 break;
4808 }
4809 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004810
4811 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004812 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004813 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4814 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004815 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4816 is_type = isTypePreview;
4817 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4818 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4819 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004821 } else {
4822 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004823 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004824 } else {
4825 is_type = IS_TYPE_NONE;
4826 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004827 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004828 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004829 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4830 }
4831 }
4832
4833 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4834 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4835
Thierry Strudel54dc9782017-02-15 12:12:10 -08004836 //Disable tintless only if the property is set to 0
4837 memset(prop, 0, sizeof(prop));
4838 property_get("persist.camera.tintless.enable", prop, "1");
4839 int32_t tintless_value = atoi(prop);
4840
Thierry Strudel3d639192016-09-09 11:52:26 -07004841 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4842 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004843
Thierry Strudel3d639192016-09-09 11:52:26 -07004844 //Disable CDS for HFR mode or if DIS/EIS is on.
4845 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4846 //after every configure_stream
4847 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4848 (m_bIsVideo)) {
4849 int32_t cds = CAM_CDS_MODE_OFF;
4850 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4851 CAM_INTF_PARM_CDS_MODE, cds))
4852 LOGE("Failed to disable CDS for HFR mode");
4853
4854 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004855
4856 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4857 uint8_t* use_av_timer = NULL;
4858
4859 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004860 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004861 use_av_timer = &m_debug_avtimer;
4862 }
4863 else{
4864 use_av_timer =
4865 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004866 if (use_av_timer) {
4867 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4868 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004869 }
4870
4871 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4872 rc = BAD_VALUE;
4873 }
4874 }
4875
Thierry Strudel3d639192016-09-09 11:52:26 -07004876 setMobicat();
4877
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004878 uint8_t nrMode = 0;
4879 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4880 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4881 }
4882
Thierry Strudel3d639192016-09-09 11:52:26 -07004883 /* Set fps and hfr mode while sending meta stream info so that sensor
4884 * can configure appropriate streaming mode */
4885 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004886 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4887 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004888 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4889 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004890 if (rc == NO_ERROR) {
4891 int32_t max_fps =
4892 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004893 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004894 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4895 }
4896 /* For HFR, more buffers are dequeued upfront to improve the performance */
4897 if (mBatchSize) {
4898 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4899 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4900 }
4901 }
4902 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 LOGE("setHalFpsRange failed");
4904 }
4905 }
4906 if (meta.exists(ANDROID_CONTROL_MODE)) {
4907 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4908 rc = extractSceneMode(meta, metaMode, mParameters);
4909 if (rc != NO_ERROR) {
4910 LOGE("extractSceneMode failed");
4911 }
4912 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004913 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004914
Thierry Strudel04e026f2016-10-10 11:27:36 -07004915 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4916 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4917 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4918 rc = setVideoHdrMode(mParameters, vhdr);
4919 if (rc != NO_ERROR) {
4920 LOGE("setVideoHDR is failed");
4921 }
4922 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004923
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004924 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004925 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004926 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004927 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4929 sensorModeFullFov)) {
4930 rc = BAD_VALUE;
4931 }
4932 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004933 //TODO: validate the arguments, HSV scenemode should have only the
4934 //advertised fps ranges
4935
4936 /*set the capture intent, hal version, tintless, stream info,
4937 *and DIS enable parameters to the backend*/
4938 LOGD("set_parms META_STREAM_INFO " );
4939 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004940 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4941 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004942 mStreamConfigInfo.type[i],
4943 mStreamConfigInfo.stream_sizes[i].width,
4944 mStreamConfigInfo.stream_sizes[i].height,
4945 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004946 mStreamConfigInfo.format[i],
4947 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004948 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004949
Thierry Strudel3d639192016-09-09 11:52:26 -07004950 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4951 mParameters);
4952 if (rc < 0) {
4953 LOGE("set_parms failed for hal version, stream info");
4954 }
4955
Chien-Yu Chenee335912017-02-09 17:53:20 -08004956 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4957 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004958 if (rc != NO_ERROR) {
4959 LOGE("Failed to get sensor output size");
4960 pthread_mutex_unlock(&mMutex);
4961 goto error_exit;
4962 }
4963
4964 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4965 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004966 mSensorModeInfo.active_array_size.width,
4967 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004968
4969 /* Set batchmode before initializing channel. Since registerBuffer
4970 * internally initializes some of the channels, better set batchmode
4971 * even before first register buffer */
4972 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4973 it != mStreamInfo.end(); it++) {
4974 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4975 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4976 && mBatchSize) {
4977 rc = channel->setBatchSize(mBatchSize);
4978 //Disable per frame map unmap for HFR/batchmode case
4979 rc |= channel->setPerFrameMapUnmap(false);
4980 if (NO_ERROR != rc) {
4981 LOGE("Channel init failed %d", rc);
4982 pthread_mutex_unlock(&mMutex);
4983 goto error_exit;
4984 }
4985 }
4986 }
4987
4988 //First initialize all streams
4989 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4990 it != mStreamInfo.end(); it++) {
4991 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004992
4993 /* Initial value of NR mode is needed before stream on */
4994 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4996 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004997 setEis) {
4998 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4999 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5000 is_type = mStreamConfigInfo.is_type[i];
5001 break;
5002 }
5003 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005005 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005006 rc = channel->initialize(IS_TYPE_NONE);
5007 }
5008 if (NO_ERROR != rc) {
5009 LOGE("Channel initialization failed %d", rc);
5010 pthread_mutex_unlock(&mMutex);
5011 goto error_exit;
5012 }
5013 }
5014
5015 if (mRawDumpChannel) {
5016 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5017 if (rc != NO_ERROR) {
5018 LOGE("Error: Raw Dump Channel init failed");
5019 pthread_mutex_unlock(&mMutex);
5020 goto error_exit;
5021 }
5022 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005023 if (mHdrPlusRawSrcChannel) {
5024 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5025 if (rc != NO_ERROR) {
5026 LOGE("Error: HDR+ RAW Source Channel init failed");
5027 pthread_mutex_unlock(&mMutex);
5028 goto error_exit;
5029 }
5030 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005031 if (mSupportChannel) {
5032 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5033 if (rc < 0) {
5034 LOGE("Support channel initialization failed");
5035 pthread_mutex_unlock(&mMutex);
5036 goto error_exit;
5037 }
5038 }
5039 if (mAnalysisChannel) {
5040 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5041 if (rc < 0) {
5042 LOGE("Analysis channel initialization failed");
5043 pthread_mutex_unlock(&mMutex);
5044 goto error_exit;
5045 }
5046 }
5047 if (mDummyBatchChannel) {
5048 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5049 if (rc < 0) {
5050 LOGE("mDummyBatchChannel setBatchSize failed");
5051 pthread_mutex_unlock(&mMutex);
5052 goto error_exit;
5053 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005054 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005055 if (rc < 0) {
5056 LOGE("mDummyBatchChannel initialization failed");
5057 pthread_mutex_unlock(&mMutex);
5058 goto error_exit;
5059 }
5060 }
5061
5062 // Set bundle info
5063 rc = setBundleInfo();
5064 if (rc < 0) {
5065 LOGE("setBundleInfo failed %d", rc);
5066 pthread_mutex_unlock(&mMutex);
5067 goto error_exit;
5068 }
5069
5070 //update settings from app here
5071 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5072 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5073 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5074 }
5075 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5076 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5077 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5078 }
5079 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5080 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5081 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5082
5083 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5084 (mLinkedCameraId != mCameraId) ) {
5085 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5086 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005087 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005088 goto error_exit;
5089 }
5090 }
5091
5092 // add bundle related cameras
5093 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5094 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005095 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5096 &m_pDualCamCmdPtr->bundle_info;
5097 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005098 if (mIsDeviceLinked)
5099 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5100 else
5101 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5102
5103 pthread_mutex_lock(&gCamLock);
5104
5105 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5106 LOGE("Dualcam: Invalid Session Id ");
5107 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005108 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005109 goto error_exit;
5110 }
5111
5112 if (mIsMainCamera == 1) {
5113 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5114 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005115 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005116 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005117 // related session id should be session id of linked session
5118 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5119 } else {
5120 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5121 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005122 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005123 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005124 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5125 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005126 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005127 pthread_mutex_unlock(&gCamLock);
5128
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005129 rc = mCameraHandle->ops->set_dual_cam_cmd(
5130 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (rc < 0) {
5132 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005133 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005134 goto error_exit;
5135 }
5136 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005137 goto no_error;
5138error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005139 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005140 return rc;
5141no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005142 mWokenUpByDaemon = false;
5143 mPendingLiveRequest = 0;
5144 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005145 }
5146
5147 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005148 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005149
5150 if (mFlushPerf) {
5151 //we cannot accept any requests during flush
5152 LOGE("process_capture_request cannot proceed during flush");
5153 pthread_mutex_unlock(&mMutex);
5154 return NO_ERROR; //should return an error
5155 }
5156
5157 if (meta.exists(ANDROID_REQUEST_ID)) {
5158 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5159 mCurrentRequestId = request_id;
5160 LOGD("Received request with id: %d", request_id);
5161 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5162 LOGE("Unable to find request id field, \
5163 & no previous id available");
5164 pthread_mutex_unlock(&mMutex);
5165 return NAME_NOT_FOUND;
5166 } else {
5167 LOGD("Re-using old request id");
5168 request_id = mCurrentRequestId;
5169 }
5170
5171 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5172 request->num_output_buffers,
5173 request->input_buffer,
5174 frameNumber);
5175 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005176 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005177 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005178 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005179 uint32_t snapshotStreamId = 0;
5180 for (size_t i = 0; i < request->num_output_buffers; i++) {
5181 const camera3_stream_buffer_t& output = request->output_buffers[i];
5182 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5183
Emilian Peev7650c122017-01-19 08:24:33 -08005184 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5185 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005186 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005187 blob_request = 1;
5188 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5189 }
5190
5191 if (output.acquire_fence != -1) {
5192 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5193 close(output.acquire_fence);
5194 if (rc != OK) {
5195 LOGE("sync wait failed %d", rc);
5196 pthread_mutex_unlock(&mMutex);
5197 return rc;
5198 }
5199 }
5200
Emilian Peev0f3c3162017-03-15 12:57:46 +00005201 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5202 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005203 depthRequestPresent = true;
5204 continue;
5205 }
5206
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005207 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005209
5210 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5211 isVidBufRequested = true;
5212 }
5213 }
5214
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005215 //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicates
5216 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5217 itr++) {
5218 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5219 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5220 channel->getStreamID(channel->getStreamTypeMask());
5221
5222 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5223 isVidBufRequested = true;
5224 }
5225 }
5226
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005228 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005229 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005230 }
5231 if (blob_request && mRawDumpChannel) {
5232 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005233 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005235 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005236 }
5237
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005238 {
5239 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5240 // Request a RAW buffer if
5241 // 1. mHdrPlusRawSrcChannel is valid.
 5242        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate.)
5243 // 3. There is no pending HDR+ request.
5244 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5245 mHdrPlusPendingRequests.size() == 0) {
5246 streamsArray.stream_request[streamsArray.num_streams].streamID =
5247 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5248 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5249 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005250 }
5251
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005252 //extract capture intent
5253 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5254 mCaptureIntent =
5255 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5256 }
5257
5258 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5259 mCacMode =
5260 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5261 }
5262
5263 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005264 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005265
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005266 {
5267 Mutex::Autolock l(gHdrPlusClientLock);
5268 // If this request has a still capture intent, try to submit an HDR+ request.
5269 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5270 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5271 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5272 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005273 }
5274
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005275 if (hdrPlusRequest) {
5276 // For a HDR+ request, just set the frame parameters.
5277 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5278 if (rc < 0) {
5279 LOGE("fail to set frame parameters");
5280 pthread_mutex_unlock(&mMutex);
5281 return rc;
5282 }
5283 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 /* Parse the settings:
5285 * - For every request in NORMAL MODE
5286 * - For every request in HFR mode during preview only case
5287 * - For first request of every batch in HFR mode during video
5288 * recording. In batchmode the same settings except frame number is
5289 * repeated in each request of the batch.
5290 */
5291 if (!mBatchSize ||
5292 (mBatchSize && !isVidBufRequested) ||
5293 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005294 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 if (rc < 0) {
5296 LOGE("fail to set frame parameters");
5297 pthread_mutex_unlock(&mMutex);
5298 return rc;
5299 }
5300 }
 5301        /* For batchMode HFR, setFrameParameters is not called for every
 5302         * request; only the frame number of the latest request is parsed.
 5303         * Keep track of the first and last frame numbers in a batch so that
 5304         * metadata for all frame numbers of the batch can be duplicated in
 5305         * handleBatchMetadata */
5306 if (mBatchSize) {
5307 if (!mToBeQueuedVidBufs) {
5308 //start of the batch
5309 mFirstFrameNumberInBatch = request->frame_number;
5310 }
5311 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5312 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5313 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005314 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 return BAD_VALUE;
5316 }
5317 }
5318 if (mNeedSensorRestart) {
5319 /* Unlock the mutex as restartSensor waits on the channels to be
5320 * stopped, which in turn calls stream callback functions -
5321 * handleBufferWithLock and handleMetadataWithLock */
5322 pthread_mutex_unlock(&mMutex);
5323 rc = dynamicUpdateMetaStreamInfo();
5324 if (rc != NO_ERROR) {
5325 LOGE("Restarting the sensor failed");
5326 return BAD_VALUE;
5327 }
5328 mNeedSensorRestart = false;
5329 pthread_mutex_lock(&mMutex);
5330 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005331 if(mResetInstantAEC) {
5332 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5333 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5334 mResetInstantAEC = false;
5335 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005336 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005337 if (request->input_buffer->acquire_fence != -1) {
5338 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5339 close(request->input_buffer->acquire_fence);
5340 if (rc != OK) {
5341 LOGE("input buffer sync wait failed %d", rc);
5342 pthread_mutex_unlock(&mMutex);
5343 return rc;
5344 }
5345 }
5346 }
5347
5348 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5349 mLastCustIntentFrmNum = frameNumber;
5350 }
5351 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005352 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005353 pendingRequestIterator latestRequest;
5354 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005355 pendingRequest.num_buffers = depthRequestPresent ?
5356 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005357 pendingRequest.request_id = request_id;
5358 pendingRequest.blob_request = blob_request;
5359 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005360 if (request->input_buffer) {
5361 pendingRequest.input_buffer =
5362 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5363 *(pendingRequest.input_buffer) = *(request->input_buffer);
5364 pInputBuffer = pendingRequest.input_buffer;
5365 } else {
5366 pendingRequest.input_buffer = NULL;
5367 pInputBuffer = NULL;
5368 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005369 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005370
5371 pendingRequest.pipeline_depth = 0;
5372 pendingRequest.partial_result_cnt = 0;
5373 extractJpegMetadata(mCurJpegMeta, request);
5374 pendingRequest.jpegMetadata = mCurJpegMeta;
5375 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005376 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005377 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5378 mHybridAeEnable =
5379 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5380 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005381
5382 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5383 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005384 /* DevCamDebug metadata processCaptureRequest */
5385 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5386 mDevCamDebugMetaEnable =
5387 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5388 }
5389 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5390 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005391
5392 //extract CAC info
5393 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5394 mCacMode =
5395 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5396 }
5397 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005398 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005399
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005400 // extract enableZsl info
5401 if (gExposeEnableZslKey) {
5402 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5403 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5404 mZslEnabled = pendingRequest.enableZsl;
5405 } else {
5406 pendingRequest.enableZsl = mZslEnabled;
5407 }
5408 }
5409
Thierry Strudel3d639192016-09-09 11:52:26 -07005410 PendingBuffersInRequest bufsForCurRequest;
5411 bufsForCurRequest.frame_number = frameNumber;
5412 // Mark current timestamp for the new request
5413 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005414 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005415
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005416 if (hdrPlusRequest) {
5417 // Save settings for this request.
5418 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5419 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5420
5421 // Add to pending HDR+ request queue.
5422 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5423 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5424
5425 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5426 }
5427
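    // Record per-request bookkeeping for each output buffer. Depth (BLOB +
    // HAL_DATASPACE_DEPTH) outputs are skipped; they are mapped to the depth channel
    // further below instead of being tracked in the regular pending-buffer list.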
Thierry Strudel3d639192016-09-09 11:52:26 -07005428 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005429 if ((request->output_buffers[i].stream->data_space ==
5430 HAL_DATASPACE_DEPTH) &&
5431 (HAL_PIXEL_FORMAT_BLOB ==
5432 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005433 continue;
5434 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005435 RequestedBufferInfo requestedBuf;
5436 memset(&requestedBuf, 0, sizeof(requestedBuf));
5437 requestedBuf.stream = request->output_buffers[i].stream;
5438 requestedBuf.buffer = NULL;
5439 pendingRequest.buffers.push_back(requestedBuf);
5440
 5441        // Add the buffer handle to the pending buffers list
5442 PendingBufferInfo bufferInfo;
5443 bufferInfo.buffer = request->output_buffers[i].buffer;
5444 bufferInfo.stream = request->output_buffers[i].stream;
5445 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5446 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5447 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5448 frameNumber, bufferInfo.buffer,
5449 channel->getStreamTypeMask(), bufferInfo.stream->format);
5450 }
5451 // Add this request packet into mPendingBuffersMap
5452 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5453 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5454 mPendingBuffersMap.get_num_overall_buffers());
5455
5456 latestRequest = mPendingRequestsList.insert(
5457 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005458
5459 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5460 // for the frame number.
5461 mShutterDispatcher.expectShutter(frameNumber);
5462 for (size_t i = 0; i < request->num_output_buffers; i++) {
5463 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5464 }
5465
Thierry Strudel3d639192016-09-09 11:52:26 -07005466 if(mFlush) {
5467 LOGI("mFlush is true");
5468 pthread_mutex_unlock(&mMutex);
5469 return NO_ERROR;
5470 }
5471
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005472 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5473 // channel.
5474 if (!hdrPlusRequest) {
5475 int indexUsed;
5476 // Notify metadata channel we receive a request
5477 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005478
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005479 if(request->input_buffer != NULL){
5480 LOGD("Input request, frame_number %d", frameNumber);
5481 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5482 if (NO_ERROR != rc) {
5483 LOGE("fail to set reproc parameters");
5484 pthread_mutex_unlock(&mMutex);
5485 return rc;
5486 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005487 }
5488
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005489 // Call request on other streams
5490 uint32_t streams_need_metadata = 0;
5491 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5492 for (size_t i = 0; i < request->num_output_buffers; i++) {
5493 const camera3_stream_buffer_t& output = request->output_buffers[i];
5494 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5495
5496 if (channel == NULL) {
5497 LOGW("invalid channel pointer for stream");
5498 continue;
5499 }
5500
5501 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5502 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5503 output.buffer, request->input_buffer, frameNumber);
5504 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005505 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005506 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5507 if (rc < 0) {
5508 LOGE("Fail to request on picture channel");
5509 pthread_mutex_unlock(&mMutex);
5510 return rc;
5511 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005512 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005513 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5514 assert(NULL != mDepthChannel);
5515 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005516
Emilian Peev7650c122017-01-19 08:24:33 -08005517 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5518 if (rc < 0) {
5519 LOGE("Fail to map on depth buffer");
5520 pthread_mutex_unlock(&mMutex);
5521 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005522 }
Emilian Peev7650c122017-01-19 08:24:33 -08005523 } else {
5524 LOGD("snapshot request with buffer %p, frame_number %d",
5525 output.buffer, frameNumber);
5526 if (!request->settings) {
5527 rc = channel->request(output.buffer, frameNumber,
5528 NULL, mPrevParameters, indexUsed);
5529 } else {
5530 rc = channel->request(output.buffer, frameNumber,
5531 NULL, mParameters, indexUsed);
5532 }
5533 if (rc < 0) {
5534 LOGE("Fail to request on picture channel");
5535 pthread_mutex_unlock(&mMutex);
5536 return rc;
5537 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005538
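                        // Record which buffer index the channel picked for this stream so
                        // the backend request references the same buffer; in constrained
                        // high-speed mode the backend free-runs (CAM_FREERUN_IDX) instead.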
Emilian Peev7650c122017-01-19 08:24:33 -08005539 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5540 uint32_t j = 0;
5541 for (j = 0; j < streamsArray.num_streams; j++) {
5542 if (streamsArray.stream_request[j].streamID == streamId) {
5543 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5544 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5545 else
5546 streamsArray.stream_request[j].buf_index = indexUsed;
5547 break;
5548 }
5549 }
5550 if (j == streamsArray.num_streams) {
5551 LOGE("Did not find matching stream to update index");
5552 assert(0);
5553 }
5554
5555 pendingBufferIter->need_metadata = true;
5556 streams_need_metadata++;
5557 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005558 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005559 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5560 bool needMetadata = false;
5561 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5562 rc = yuvChannel->request(output.buffer, frameNumber,
5563 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5564 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005565 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005566 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 pthread_mutex_unlock(&mMutex);
5568 return rc;
5569 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005570
5571 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5572 uint32_t j = 0;
5573 for (j = 0; j < streamsArray.num_streams; j++) {
5574 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005575 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5576 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5577 else
5578 streamsArray.stream_request[j].buf_index = indexUsed;
5579 break;
5580 }
5581 }
5582 if (j == streamsArray.num_streams) {
5583 LOGE("Did not find matching stream to update index");
5584 assert(0);
5585 }
5586
5587 pendingBufferIter->need_metadata = needMetadata;
5588 if (needMetadata)
5589 streams_need_metadata += 1;
5590 LOGD("calling YUV channel request, need_metadata is %d",
5591 needMetadata);
5592 } else {
5593 LOGD("request with buffer %p, frame_number %d",
5594 output.buffer, frameNumber);
5595
5596 rc = channel->request(output.buffer, frameNumber, indexUsed);
5597
5598 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5599 uint32_t j = 0;
5600 for (j = 0; j < streamsArray.num_streams; j++) {
5601 if (streamsArray.stream_request[j].streamID == streamId) {
5602 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5603 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5604 else
5605 streamsArray.stream_request[j].buf_index = indexUsed;
5606 break;
5607 }
5608 }
5609 if (j == streamsArray.num_streams) {
5610 LOGE("Did not find matching stream to update index");
5611 assert(0);
5612 }
5613
5614 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5615 && mBatchSize) {
5616 mToBeQueuedVidBufs++;
5617 if (mToBeQueuedVidBufs == mBatchSize) {
5618 channel->queueBatchBuf();
5619 }
5620 }
5621 if (rc < 0) {
5622 LOGE("request failed");
5623 pthread_mutex_unlock(&mMutex);
5624 return rc;
5625 }
5626 }
5627 pendingBufferIter++;
5628 }
5629
5630 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5631 itr++) {
5632 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5633
5634 if (channel == NULL) {
5635 LOGE("invalid channel pointer for stream");
5636 assert(0);
5637 return BAD_VALUE;
5638 }
5639
5640 InternalRequest requestedStream;
5641 requestedStream = (*itr);
5642
5643
5644 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5645 LOGD("snapshot request internally input buffer %p, frame_number %d",
5646 request->input_buffer, frameNumber);
5647 if(request->input_buffer != NULL){
5648 rc = channel->request(NULL, frameNumber,
5649 pInputBuffer, &mReprocMeta, indexUsed, true,
5650 requestedStream.meteringOnly);
5651 if (rc < 0) {
5652 LOGE("Fail to request on picture channel");
5653 pthread_mutex_unlock(&mMutex);
5654 return rc;
5655 }
5656 } else {
5657 LOGD("snapshot request with frame_number %d", frameNumber);
5658 if (!request->settings) {
5659 rc = channel->request(NULL, frameNumber,
5660 NULL, mPrevParameters, indexUsed, true,
5661 requestedStream.meteringOnly);
5662 } else {
5663 rc = channel->request(NULL, frameNumber,
5664 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5665 }
5666 if (rc < 0) {
5667 LOGE("Fail to request on picture channel");
5668 pthread_mutex_unlock(&mMutex);
5669 return rc;
5670 }
5671
5672 if ((*itr).meteringOnly != 1) {
5673 requestedStream.need_metadata = 1;
5674 streams_need_metadata++;
5675 }
5676 }
5677
5678 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5679 uint32_t j = 0;
5680 for (j = 0; j < streamsArray.num_streams; j++) {
5681 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005682 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5683 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5684 else
5685 streamsArray.stream_request[j].buf_index = indexUsed;
5686 break;
5687 }
5688 }
5689 if (j == streamsArray.num_streams) {
5690 LOGE("Did not find matching stream to update index");
5691 assert(0);
5692 }
5693
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005694 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005695 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005696 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005697 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005698 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005699 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005700 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005701
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005702 //If 2 streams have need_metadata set to true, fail the request, unless
5703 //we copy/reference count the metadata buffer
5704 if (streams_need_metadata > 1) {
 5705            LOGE("not supporting a request in which two streams require"
 5706                    " 2 HAL metadata for reprocessing");
5707 pthread_mutex_unlock(&mMutex);
5708 return -EINVAL;
5709 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005710
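        // Enable PDAF data from the backend only when this request includes a depth output.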
Emilian Peev7650c122017-01-19 08:24:33 -08005711 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5713 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5714 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5715 pthread_mutex_unlock(&mMutex);
5716 return BAD_VALUE;
5717 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005718 if (request->input_buffer == NULL) {
5719 /* Set the parameters to backend:
5720 * - For every request in NORMAL MODE
5721 * - For every request in HFR mode during preview only case
5722 * - Once every batch in HFR mode during video recording
5723 */
5724 if (!mBatchSize ||
5725 (mBatchSize && !isVidBufRequested) ||
5726 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5727 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5728 mBatchSize, isVidBufRequested,
5729 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005730
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005731 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
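                // End of a batch: merge this request's stream IDs into the accumulated
                // batched stream list (skipping duplicates) and send the merged list to
                // the backend in the set_parms call below.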
5732 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5733 uint32_t m = 0;
5734 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5735 if (streamsArray.stream_request[k].streamID ==
5736 mBatchedStreamsArray.stream_request[m].streamID)
5737 break;
5738 }
5739 if (m == mBatchedStreamsArray.num_streams) {
5740 mBatchedStreamsArray.stream_request\
5741 [mBatchedStreamsArray.num_streams].streamID =
5742 streamsArray.stream_request[k].streamID;
5743 mBatchedStreamsArray.stream_request\
5744 [mBatchedStreamsArray.num_streams].buf_index =
5745 streamsArray.stream_request[k].buf_index;
5746 mBatchedStreamsArray.num_streams =
5747 mBatchedStreamsArray.num_streams + 1;
5748 }
5749 }
5750 streamsArray = mBatchedStreamsArray;
5751 }
5752 /* Update stream id of all the requested buffers */
5753 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5754 streamsArray)) {
5755 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005756 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005757 return BAD_VALUE;
5758 }
5759
5760 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5761 mParameters);
5762 if (rc < 0) {
5763 LOGE("set_parms failed");
5764 }
 5765            /* reset to zero because the batch is queued */
5766 mToBeQueuedVidBufs = 0;
5767 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5768 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5769 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
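                // Mid-batch request: only accumulate this request's stream IDs into the
                // batched stream list; set_parms is deferred until the batch is full.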
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005770 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5771 uint32_t m = 0;
5772 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5773 if (streamsArray.stream_request[k].streamID ==
5774 mBatchedStreamsArray.stream_request[m].streamID)
5775 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005776 }
5777 if (m == mBatchedStreamsArray.num_streams) {
5778 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5779 streamID = streamsArray.stream_request[k].streamID;
5780 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5781 buf_index = streamsArray.stream_request[k].buf_index;
5782 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5783 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005784 }
5785 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005786 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005787
5788 // Start all streams after the first setting is sent, so that the
5789 // setting can be applied sooner: (0 + apply_delay)th frame.
5790 if (mState == CONFIGURED && mChannelHandle) {
5791 //Then start them.
5792 LOGH("Start META Channel");
5793 rc = mMetadataChannel->start();
5794 if (rc < 0) {
5795 LOGE("META channel start failed");
5796 pthread_mutex_unlock(&mMutex);
5797 return rc;
5798 }
5799
5800 if (mAnalysisChannel) {
5801 rc = mAnalysisChannel->start();
5802 if (rc < 0) {
5803 LOGE("Analysis channel start failed");
5804 mMetadataChannel->stop();
5805 pthread_mutex_unlock(&mMutex);
5806 return rc;
5807 }
5808 }
5809
5810 if (mSupportChannel) {
5811 rc = mSupportChannel->start();
5812 if (rc < 0) {
5813 LOGE("Support channel start failed");
5814 mMetadataChannel->stop();
 5815                    /* Although support and analysis are mutually exclusive today,
 5816                       adding it in any case for future proofing */
5817 if (mAnalysisChannel) {
5818 mAnalysisChannel->stop();
5819 }
5820 pthread_mutex_unlock(&mMutex);
5821 return rc;
5822 }
5823 }
5824 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5825 it != mStreamInfo.end(); it++) {
5826 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5827 LOGH("Start Processing Channel mask=%d",
5828 channel->getStreamTypeMask());
5829 rc = channel->start();
5830 if (rc < 0) {
5831 LOGE("channel start failed");
5832 pthread_mutex_unlock(&mMutex);
5833 return rc;
5834 }
5835 }
5836
5837 if (mRawDumpChannel) {
5838 LOGD("Starting raw dump stream");
5839 rc = mRawDumpChannel->start();
5840 if (rc != NO_ERROR) {
5841 LOGE("Error Starting Raw Dump Channel");
5842 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5843 it != mStreamInfo.end(); it++) {
5844 QCamera3Channel *channel =
5845 (QCamera3Channel *)(*it)->stream->priv;
5846 LOGH("Stopping Processing Channel mask=%d",
5847 channel->getStreamTypeMask());
5848 channel->stop();
5849 }
5850 if (mSupportChannel)
5851 mSupportChannel->stop();
5852 if (mAnalysisChannel) {
5853 mAnalysisChannel->stop();
5854 }
5855 mMetadataChannel->stop();
5856 pthread_mutex_unlock(&mMutex);
5857 return rc;
5858 }
5859 }
5860
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005861 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005862 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005863 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005864 if (rc != NO_ERROR) {
5865 LOGE("start_channel failed %d", rc);
5866 pthread_mutex_unlock(&mMutex);
5867 return rc;
5868 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005869
5870 {
5871 // Configure Easel for stream on.
5872 Mutex::Autolock l(gHdrPlusClientLock);
5873 if (EaselManagerClientOpened) {
5874 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chena6c99062017-05-23 13:45:06 -07005875 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5876 /*enableIpu*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005877 if (rc != OK) {
5878 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5879 mCameraId, mSensorModeInfo.op_pixel_clk);
5880 pthread_mutex_unlock(&mMutex);
5881 return rc;
5882 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005883 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005884 }
5885 }
5886
5887 // Start sensor streaming.
5888 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5889 mChannelHandle);
5890 if (rc != NO_ERROR) {
5891 LOGE("start_sensor_stream_on failed %d", rc);
5892 pthread_mutex_unlock(&mMutex);
5893 return rc;
5894 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005895 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005897 }
5898
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005899 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07005900 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005901 Mutex::Autolock l(gHdrPlusClientLock);
5902 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5903 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5904 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5905 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5906 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5907 rc = enableHdrPlusModeLocked();
5908 if (rc != OK) {
5909 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5910 pthread_mutex_unlock(&mMutex);
5911 return rc;
5912 }
5913
5914 mFirstPreviewIntentSeen = true;
5915 }
5916 }
5917
Thierry Strudel3d639192016-09-09 11:52:26 -07005918 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5919
5920 mState = STARTED;
5921 // Added a timed condition wait
5922 struct timespec ts;
5923 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005924 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005925 if (rc < 0) {
5926 isValidTimeout = 0;
 5927        LOGE("Error reading the monotonic clock!!");
5928 }
5929 else {
 5930        // Default to a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005931 int64_t timeout = 5;
5932 {
5933 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5934 // If there is a pending HDR+ request, the following requests may be blocked until the
5935 // HDR+ request is done. So allow a longer timeout.
5936 if (mHdrPlusPendingRequests.size() > 0) {
5937 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5938 }
5939 }
5940 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005941 }
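    // Throttle the framework: block while the number of in-flight requests is at or above
    // the minimum, unless this is a reprocess (input-buffer) request or the device is in
    // an error/deinit state.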
 5942    //Block on condition variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005943 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005944 (mState != ERROR) && (mState != DEINIT)) {
5945 if (!isValidTimeout) {
5946 LOGD("Blocking on conditional wait");
5947 pthread_cond_wait(&mRequestCond, &mMutex);
5948 }
5949 else {
5950 LOGD("Blocking on timed conditional wait");
5951 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5952 if (rc == ETIMEDOUT) {
5953 rc = -ENODEV;
5954 LOGE("Unblocked on timeout!!!!");
5955 break;
5956 }
5957 }
5958 LOGD("Unblocked");
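        // If the wakeup came from the backend (presumably the mm-camera daemon) rather
        // than from a framework buffer/metadata callback, exit the wait once the
        // in-flight count has dropped below the maximum.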
5959 if (mWokenUpByDaemon) {
5960 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005961 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005962 break;
5963 }
5964 }
5965 pthread_mutex_unlock(&mMutex);
5966
5967 return rc;
5968}
5969
5970/*===========================================================================
5971 * FUNCTION : dump
5972 *
5973 * DESCRIPTION:
5974 *
5975 * PARAMETERS :
5976 *
5977 *
5978 * RETURN :
5979 *==========================================================================*/
5980void QCamera3HardwareInterface::dump(int fd)
5981{
5982 pthread_mutex_lock(&mMutex);
5983 dprintf(fd, "\n Camera HAL3 information Begin \n");
5984
5985 dprintf(fd, "\nNumber of pending requests: %zu \n",
5986 mPendingRequestsList.size());
5987 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5988 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5989 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5990 for(pendingRequestIterator i = mPendingRequestsList.begin();
5991 i != mPendingRequestsList.end(); i++) {
5992 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5993 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5994 i->input_buffer);
5995 }
5996 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5997 mPendingBuffersMap.get_num_overall_buffers());
5998 dprintf(fd, "-------+------------------\n");
5999 dprintf(fd, " Frame | Stream type mask \n");
6000 dprintf(fd, "-------+------------------\n");
6001 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6002 for(auto &j : req.mPendingBufferList) {
6003 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6004 dprintf(fd, " %5d | %11d \n",
6005 req.frame_number, channel->getStreamTypeMask());
6006 }
6007 }
6008 dprintf(fd, "-------+------------------\n");
6009
6010 dprintf(fd, "\nPending frame drop list: %zu\n",
6011 mPendingFrameDropList.size());
6012 dprintf(fd, "-------+-----------\n");
6013 dprintf(fd, " Frame | Stream ID \n");
6014 dprintf(fd, "-------+-----------\n");
6015 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6016 i != mPendingFrameDropList.end(); i++) {
6017 dprintf(fd, " %5d | %9d \n",
6018 i->frame_number, i->stream_ID);
6019 }
6020 dprintf(fd, "-------+-----------\n");
6021
6022 dprintf(fd, "\n Camera HAL3 information End \n");
6023
6024 /* use dumpsys media.camera as trigger to send update debug level event */
6025 mUpdateDebugLevel = true;
6026 pthread_mutex_unlock(&mMutex);
6027 return;
6028}
6029
6030/*===========================================================================
6031 * FUNCTION : flush
6032 *
6033 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6034 * conditionally restarts channels
6035 *
6036 * PARAMETERS :
6037 * @ restartChannels: re-start all channels
6038 *
6039 *
6040 * RETURN :
6041 * 0 on success
6042 * Error code on failure
6043 *==========================================================================*/
6044int QCamera3HardwareInterface::flush(bool restartChannels)
6045{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006046 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006047 int32_t rc = NO_ERROR;
6048
6049 LOGD("Unblocking Process Capture Request");
6050 pthread_mutex_lock(&mMutex);
6051 mFlush = true;
6052 pthread_mutex_unlock(&mMutex);
6053
6054 rc = stopAllChannels();
 6055    // Unlink the dual camera bundle (sync off) before closing, if this device is linked
6056 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006057 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6058 &m_pDualCamCmdPtr->bundle_info;
6059 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006060 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6061 pthread_mutex_lock(&gCamLock);
6062
6063 if (mIsMainCamera == 1) {
6064 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6065 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006066 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006067 // related session id should be session id of linked session
6068 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6069 } else {
6070 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6071 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006072 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006073 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6074 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006075 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006076 pthread_mutex_unlock(&gCamLock);
6077
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006078 rc = mCameraHandle->ops->set_dual_cam_cmd(
6079 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006080 if (rc < 0) {
6081 LOGE("Dualcam: Unlink failed, but still proceed to close");
6082 }
6083 }
6084
6085 if (rc < 0) {
6086 LOGE("stopAllChannels failed");
6087 return rc;
6088 }
6089 if (mChannelHandle) {
6090 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6091 mChannelHandle);
6092 }
6093
6094 // Reset bundle info
6095 rc = setBundleInfo();
6096 if (rc < 0) {
6097 LOGE("setBundleInfo failed %d", rc);
6098 return rc;
6099 }
6100
6101 // Mutex Lock
6102 pthread_mutex_lock(&mMutex);
6103
6104 // Unblock process_capture_request
6105 mPendingLiveRequest = 0;
6106 pthread_cond_signal(&mRequestCond);
6107
6108 rc = notifyErrorForPendingRequests();
6109 if (rc < 0) {
6110 LOGE("notifyErrorForPendingRequests failed");
6111 pthread_mutex_unlock(&mMutex);
6112 return rc;
6113 }
6114
6115 mFlush = false;
6116
6117 // Start the Streams/Channels
6118 if (restartChannels) {
6119 rc = startAllChannels();
6120 if (rc < 0) {
6121 LOGE("startAllChannels failed");
6122 pthread_mutex_unlock(&mMutex);
6123 return rc;
6124 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006125 if (mChannelHandle) {
6126 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006127 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006128 if (rc < 0) {
6129 LOGE("start_channel failed");
6130 pthread_mutex_unlock(&mMutex);
6131 return rc;
6132 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006133 }
6134 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006135 pthread_mutex_unlock(&mMutex);
6136
6137 return 0;
6138}
6139
6140/*===========================================================================
6141 * FUNCTION : flushPerf
6142 *
 6143 * DESCRIPTION: This is the performance-optimized version of flush that does
 6144 *              not use stream off; instead it flushes the system
6145 *
6146 * PARAMETERS :
6147 *
6148 *
6149 * RETURN : 0 : success
6150 * -EINVAL: input is malformed (device is not valid)
6151 * -ENODEV: if the device has encountered a serious error
6152 *==========================================================================*/
6153int QCamera3HardwareInterface::flushPerf()
6154{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006155 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006156 int32_t rc = 0;
6157 struct timespec timeout;
6158 bool timed_wait = false;
6159
6160 pthread_mutex_lock(&mMutex);
6161 mFlushPerf = true;
6162 mPendingBuffersMap.numPendingBufsAtFlush =
6163 mPendingBuffersMap.get_num_overall_buffers();
6164 LOGD("Calling flush. Wait for %d buffers to return",
6165 mPendingBuffersMap.numPendingBufsAtFlush);
6166
6167 /* send the flush event to the backend */
6168 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6169 if (rc < 0) {
6170 LOGE("Error in flush: IOCTL failure");
6171 mFlushPerf = false;
6172 pthread_mutex_unlock(&mMutex);
6173 return -ENODEV;
6174 }
6175
6176 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6177 LOGD("No pending buffers in HAL, return flush");
6178 mFlushPerf = false;
6179 pthread_mutex_unlock(&mMutex);
6180 return rc;
6181 }
6182
6183 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006184 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006185 if (rc < 0) {
 6186        LOGE("Error reading the monotonic clock, cannot use timed wait");
6187 } else {
6188 timeout.tv_sec += FLUSH_TIMEOUT;
6189 timed_wait = true;
6190 }
6191
 6192    //Block on condition variable
6193 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6194 LOGD("Waiting on mBuffersCond");
6195 if (!timed_wait) {
6196 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6197 if (rc != 0) {
6198 LOGE("pthread_cond_wait failed due to rc = %s",
6199 strerror(rc));
6200 break;
6201 }
6202 } else {
6203 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6204 if (rc != 0) {
6205 LOGE("pthread_cond_timedwait failed due to rc = %s",
6206 strerror(rc));
6207 break;
6208 }
6209 }
6210 }
6211 if (rc != 0) {
6212 mFlushPerf = false;
6213 pthread_mutex_unlock(&mMutex);
6214 return -ENODEV;
6215 }
6216
6217 LOGD("Received buffers, now safe to return them");
6218
6219 //make sure the channels handle flush
6220 //currently only required for the picture channel to release snapshot resources
6221 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6222 it != mStreamInfo.end(); it++) {
6223 QCamera3Channel *channel = (*it)->channel;
6224 if (channel) {
6225 rc = channel->flush();
6226 if (rc) {
6227 LOGE("Flushing the channels failed with error %d", rc);
 6228                // Even though the channel flush failed, we need to continue and
 6229                // return the buffers we have to the framework; however, the return
 6230                // value will be an error
6231 rc = -ENODEV;
6232 }
6233 }
6234 }
6235
6236 /* notify the frameworks and send errored results */
6237 rc = notifyErrorForPendingRequests();
6238 if (rc < 0) {
6239 LOGE("notifyErrorForPendingRequests failed");
6240 pthread_mutex_unlock(&mMutex);
6241 return rc;
6242 }
6243
6244 //unblock process_capture_request
6245 mPendingLiveRequest = 0;
6246 unblockRequestIfNecessary();
6247
6248 mFlushPerf = false;
6249 pthread_mutex_unlock(&mMutex);
6250 LOGD ("Flush Operation complete. rc = %d", rc);
6251 return rc;
6252}
6253
6254/*===========================================================================
6255 * FUNCTION : handleCameraDeviceError
6256 *
6257 * DESCRIPTION: This function calls internal flush and notifies the error to
6258 * framework and updates the state variable.
6259 *
6260 * PARAMETERS : None
6261 *
6262 * RETURN : NO_ERROR on Success
6263 * Error code on failure
6264 *==========================================================================*/
6265int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6266{
6267 int32_t rc = NO_ERROR;
6268
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006269 {
6270 Mutex::Autolock lock(mFlushLock);
6271 pthread_mutex_lock(&mMutex);
6272 if (mState != ERROR) {
6273 //if mState != ERROR, nothing to be done
6274 pthread_mutex_unlock(&mMutex);
6275 return NO_ERROR;
6276 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006277 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006278
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006279 rc = flush(false /* restart channels */);
6280 if (NO_ERROR != rc) {
6281 LOGE("internal flush to handle mState = ERROR failed");
6282 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006283
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006284 pthread_mutex_lock(&mMutex);
6285 mState = DEINIT;
6286 pthread_mutex_unlock(&mMutex);
6287 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006288
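    // After the internal flush, report a fatal device error to the framework so it can
    // close the camera device.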
6289 camera3_notify_msg_t notify_msg;
6290 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6291 notify_msg.type = CAMERA3_MSG_ERROR;
6292 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6293 notify_msg.message.error.error_stream = NULL;
6294 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006295 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006296
6297 return rc;
6298}
6299
6300/*===========================================================================
6301 * FUNCTION : captureResultCb
6302 *
6303 * DESCRIPTION: Callback handler for all capture result
6304 * (streams, as well as metadata)
6305 *
6306 * PARAMETERS :
6307 * @metadata : metadata information
6308 * @buffer : actual gralloc buffer to be returned to frameworks.
6309 * NULL if metadata.
6310 *
6311 * RETURN : NONE
6312 *==========================================================================*/
6313void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6314 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6315{
6316 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006317 pthread_mutex_lock(&mMutex);
6318 uint8_t batchSize = mBatchSize;
6319 pthread_mutex_unlock(&mMutex);
6320 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006321 handleBatchMetadata(metadata_buf,
6322 true /* free_and_bufdone_meta_buf */);
6323 } else { /* mBatchSize = 0 */
6324 hdrPlusPerfLock(metadata_buf);
6325 pthread_mutex_lock(&mMutex);
6326 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006327 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006328 true /* last urgent frame of batch metadata */,
6329 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006330 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006331 pthread_mutex_unlock(&mMutex);
6332 }
6333 } else if (isInputBuffer) {
6334 pthread_mutex_lock(&mMutex);
6335 handleInputBufferWithLock(frame_number);
6336 pthread_mutex_unlock(&mMutex);
6337 } else {
6338 pthread_mutex_lock(&mMutex);
6339 handleBufferWithLock(buffer, frame_number);
6340 pthread_mutex_unlock(&mMutex);
6341 }
6342 return;
6343}
6344
6345/*===========================================================================
6346 * FUNCTION : getReprocessibleOutputStreamId
6347 *
6348 * DESCRIPTION: Get source output stream id for the input reprocess stream
6349 * based on size and format, which would be the largest
6350 * output stream if an input stream exists.
6351 *
6352 * PARAMETERS :
6353 * @id : return the stream id if found
6354 *
6355 * RETURN : int32_t type of status
6356 * NO_ERROR -- success
 6357 *              non-zero failure code
6358 *==========================================================================*/
6359int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6360{
 6361    /* check if any output or bidirectional stream exists with the same size and
 6362       format, and return that stream */
6363 if ((mInputStreamInfo.dim.width > 0) &&
6364 (mInputStreamInfo.dim.height > 0)) {
6365 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6366 it != mStreamInfo.end(); it++) {
6367
6368 camera3_stream_t *stream = (*it)->stream;
6369 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6370 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6371 (stream->format == mInputStreamInfo.format)) {
6372 // Usage flag for an input stream and the source output stream
6373 // may be different.
6374 LOGD("Found reprocessible output stream! %p", *it);
6375 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6376 stream->usage, mInputStreamInfo.usage);
6377
6378 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6379 if (channel != NULL && channel->mStreams[0]) {
6380 id = channel->mStreams[0]->getMyServerID();
6381 return NO_ERROR;
6382 }
6383 }
6384 }
6385 } else {
6386 LOGD("No input stream, so no reprocessible output stream");
6387 }
6388 return NAME_NOT_FOUND;
6389}
6390
6391/*===========================================================================
6392 * FUNCTION : lookupFwkName
6393 *
 6394 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
 6395 *              make sure the parameter is correctly propagated
6396 *
6397 * PARAMETERS :
6398 * @arr : map between the two enums
6399 * @len : len of the map
6400 * @hal_name : name of the hal_parm to map
6401 *
6402 * RETURN : int type of status
6403 * fwk_name -- success
 6404 *              non-zero failure code
6405 *==========================================================================*/
6406template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6407 size_t len, halType hal_name)
6408{
6409
6410 for (size_t i = 0; i < len; i++) {
6411 if (arr[i].hal_name == hal_name) {
6412 return arr[i].fwk_name;
6413 }
6414 }
6415
 6416    /* Not being able to find a matching framework type is not necessarily
 6417     * an error case. This happens when mm-camera supports more attributes
 6418     * than the framework does */
6419 LOGH("Cannot find matching framework type");
6420 return NAME_NOT_FOUND;
6421}
6422
6423/*===========================================================================
6424 * FUNCTION : lookupHalName
6425 *
 6426 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
 6427 *              make sure the parameter is correctly propagated
6428 *
6429 * PARAMETERS :
6430 * @arr : map between the two enums
6431 * @len : len of the map
 6432 * @fwk_name : name of the fwk parm to map
6433 *
6434 * RETURN : int32_t type of status
6435 * hal_name -- success
 6436 *              non-zero failure code
6437 *==========================================================================*/
6438template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6439 size_t len, fwkType fwk_name)
6440{
6441 for (size_t i = 0; i < len; i++) {
6442 if (arr[i].fwk_name == fwk_name) {
6443 return arr[i].hal_name;
6444 }
6445 }
6446
6447 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6448 return NAME_NOT_FOUND;
6449}
6450
6451/*===========================================================================
6452 * FUNCTION : lookupProp
6453 *
6454 * DESCRIPTION: lookup a value by its name
6455 *
6456 * PARAMETERS :
6457 * @arr : map between the two enums
6458 * @len : size of the map
6459 * @name : name to be looked up
6460 *
6461 * RETURN : Value if found
6462 * CAM_CDS_MODE_MAX if not found
6463 *==========================================================================*/
6464template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6465 size_t len, const char *name)
6466{
6467 if (name) {
6468 for (size_t i = 0; i < len; i++) {
6469 if (!strcmp(arr[i].desc, name)) {
6470 return arr[i].val;
6471 }
6472 }
6473 }
6474 return CAM_CDS_MODE_MAX;
6475}
6476
6477/*===========================================================================
 6478 * FUNCTION   : translateFromHalMetadata
6479 * DESCRIPTION:
6480 *
6481 * PARAMETERS :
6482 * @metadata : metadata information from callback
6483 * @timestamp: metadata buffer timestamp
6484 * @request_id: request id
6485 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006486 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006487 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6488 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006489 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006490 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6491 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006492 *
6493 * RETURN : camera_metadata_t*
6494 * metadata in a format specified by fwk
6495 *==========================================================================*/
6496camera_metadata_t*
6497QCamera3HardwareInterface::translateFromHalMetadata(
6498 metadata_buffer_t *metadata,
6499 nsecs_t timestamp,
6500 int32_t request_id,
6501 const CameraMetadata& jpegMetadata,
6502 uint8_t pipeline_depth,
6503 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006504 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006505 /* DevCamDebug metadata translateFromHalMetadata argument */
6506 uint8_t DevCamDebug_meta_enable,
6507 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006508 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006509 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006510 bool lastMetadataInBatch,
6511 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006512{
6513 CameraMetadata camMetadata;
6514 camera_metadata_t *resultMetadata;
6515
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006516 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006517 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6518 * Timestamp is needed because it's used for shutter notify calculation.
6519 * */
6520 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6521 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006522 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006523 }
6524
Thierry Strudel3d639192016-09-09 11:52:26 -07006525 if (jpegMetadata.entryCount())
6526 camMetadata.append(jpegMetadata);
6527
6528 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6529 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6530 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6531 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006532 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006533 if (mBatchSize == 0) {
6534 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6535 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6536 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006537
Samuel Ha68ba5172016-12-15 18:41:12 -08006538 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6539 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6540 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6541 // DevCamDebug metadata translateFromHalMetadata AF
6542 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6543 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6544 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6545 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6546 }
6547 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6548 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6549 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6550 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6551 }
6552 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6553 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6554 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6555 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6556 }
6557 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6558 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6559 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6560 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6561 }
6562 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6563 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6564 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6565 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6566 }
6567 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6568 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6569 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6570 *DevCamDebug_af_monitor_pdaf_target_pos;
6571 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6572 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6573 }
6574 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6575 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6576 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6577 *DevCamDebug_af_monitor_pdaf_confidence;
6578 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6579 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6580 }
6581 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6582 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6583 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6584 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6585 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6586 }
6587 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6588 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6589 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6590 *DevCamDebug_af_monitor_tof_target_pos;
6591 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6592 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6593 }
6594 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6595 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6596 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6597 *DevCamDebug_af_monitor_tof_confidence;
6598 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6599 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6600 }
6601 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6602 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6603 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6604 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6605 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6606 }
6607 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6608 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6609 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6610 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6611 &fwk_DevCamDebug_af_monitor_type_select, 1);
6612 }
6613 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6614 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6615 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6616 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6617 &fwk_DevCamDebug_af_monitor_refocus, 1);
6618 }
6619 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6620 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6621 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6622 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6623 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6624 }
6625 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6626 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6627 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6628 *DevCamDebug_af_search_pdaf_target_pos;
6629 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6630 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6631 }
6632 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6633 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6634 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6635 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6636 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6637 }
6638 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6639 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6640 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6641 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6642 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6643 }
6644 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6645 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6646 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6647 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6648 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6649 }
6650 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6651 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6652 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6653 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6654 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6655 }
6656 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6657 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6658 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6659 *DevCamDebug_af_search_tof_target_pos;
6660 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6661 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6662 }
6663 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6664 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6665 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6666 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6667 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6670 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6671 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6672 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6673 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6674 }
6675 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6676 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6677 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6678 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6679 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6682 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6683 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6684 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6685 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6686 }
6687 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6688 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6689 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6690 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6691 &fwk_DevCamDebug_af_search_type_select, 1);
6692 }
6693 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6694 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6695 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6696 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6697 &fwk_DevCamDebug_af_search_next_pos, 1);
6698 }
6699 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6700 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6701 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6702 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6703 &fwk_DevCamDebug_af_search_target_pos, 1);
6704 }
6705 // DevCamDebug metadata translateFromHalMetadata AEC
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6707 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6708 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6709 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6712 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6713 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6714 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6715 }
6716 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6717 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6718 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6719 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6720 }
6721 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6722 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6723 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6724 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6725 }
6726 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6727 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6728 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6729 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6730 }
6731 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6732 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6733 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6734 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6735 }
6736 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6737 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6738 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6739 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6740 }
6741 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6742 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6743 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6744 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6745 }
Samuel Ha34229982017-02-17 13:51:11 -08006746 // DevCamDebug metadata translateFromHalMetadata zzHDR
6747 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6748 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6749 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6750 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6751 }
6752 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6753 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006754 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006755 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6756 }
6757 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6758 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6759 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6760 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6763 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006764 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006765 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6766 }
6767 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6768 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6769 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6770 *DevCamDebug_aec_hdr_sensitivity_ratio;
6771 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6772 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6773 }
6774 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6775 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6776 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6777 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6778 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6779 }
6780 // DevCamDebug metadata translateFromHalMetadata ADRC
6781 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6782 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6783 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6784 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6785 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6786 }
6787 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6788 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6789 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6790 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6791 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6792 }
6793 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6794 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6795 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6796 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6797 }
6798 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6799 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6800 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6801 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6802 }
6803 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6804 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6805 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6806 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6807 }
6808 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6809 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6810 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6811 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6812 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006813 // DevCamDebug metadata translateFromHalMetadata AWB
6814 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6815 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6816 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6817 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6818 }
6819 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6820 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6821 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6822 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6823 }
6824 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6825 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6826 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6827 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6828 }
6829 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6830 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6831 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6832 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6833 }
6834 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6835 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6836 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6837 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6838 }
6839 }
6840 // atrace_end(ATRACE_TAG_ALWAYS);
6841
Thierry Strudel3d639192016-09-09 11:52:26 -07006842 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6843 int64_t fwk_frame_number = *frame_number;
6844 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6845 }
6846
6847 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6848 int32_t fps_range[2];
6849 fps_range[0] = (int32_t)float_range->min_fps;
6850 fps_range[1] = (int32_t)float_range->max_fps;
6851 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6852 fps_range, 2);
6853 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6854 fps_range[0], fps_range[1]);
6855 }
6856
6857 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6858 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6859 }
6860
6861 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6862 int val = lookupFwkName(SCENE_MODES_MAP,
6863 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6864 *sceneMode);
6865 if (NAME_NOT_FOUND != val) {
6866 uint8_t fwkSceneMode = (uint8_t)val;
6867 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6868 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6869 fwkSceneMode);
6870 }
6871 }
6872
6873 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6874 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6875 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6876 }
6877
6878 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6879 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6880 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6881 }
6882
6883 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6884 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6885 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6886 }
6887
6888 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6889 CAM_INTF_META_EDGE_MODE, metadata) {
6890 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6891 }
6892
6893 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6894 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6895 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6896 }
6897
6898 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6899 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6900 }
6901
6902 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6903 if (0 <= *flashState) {
6904 uint8_t fwk_flashState = (uint8_t) *flashState;
6905 if (!gCamCapability[mCameraId]->flash_available) {
6906 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6907 }
6908 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6909 }
6910 }
6911
6912 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6913 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6914 if (NAME_NOT_FOUND != val) {
6915 uint8_t fwk_flashMode = (uint8_t)val;
6916 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6917 }
6918 }
6919
6920 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6921 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6922 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6923 }
6924
6925 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6926 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6927 }
6928
6929 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6930 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6931 }
6932
6933 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6934 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6935 }
6936
6937 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6938 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6939 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6940 }
6941
6942 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6943 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6944 LOGD("fwk_videoStab = %d", fwk_videoStab);
6945 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6946 } else {
6947 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6948 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6949 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6950 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006951 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006952 }
6953
6954 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6955 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6956 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6957 }
6958
6959 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6960 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6961 }
6962
Thierry Strudel3d639192016-09-09 11:52:26 -07006963 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6964 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006965 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006966
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006967 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6968 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006969
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006970 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006971 blackLevelAppliedPattern->cam_black_level[0],
6972 blackLevelAppliedPattern->cam_black_level[1],
6973 blackLevelAppliedPattern->cam_black_level[2],
6974 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006975 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6976 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006977
6978#ifndef USE_HAL_3_3
6979 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306980 // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07006981 // depth space, i.e. divide by 2^(14-10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05306982 fwk_blackLevelInd[0] /= 16.0;
6983 fwk_blackLevelInd[1] /= 16.0;
6984 fwk_blackLevelInd[2] /= 16.0;
6985 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006986 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6987 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006988#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006989 }
6990
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006991#ifndef USE_HAL_3_3
6992 // Fixed whitelevel is used by ISP/Sensor
6993 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6994 &gCamCapability[mCameraId]->white_level, 1);
6995#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006996
6997 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6998 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6999 int32_t scalerCropRegion[4];
7000 scalerCropRegion[0] = hScalerCropRegion->left;
7001 scalerCropRegion[1] = hScalerCropRegion->top;
7002 scalerCropRegion[2] = hScalerCropRegion->width;
7003 scalerCropRegion[3] = hScalerCropRegion->height;
7004
7005 // Adjust crop region from sensor output coordinate system to active
7006 // array coordinate system.
7007 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7008 scalerCropRegion[2], scalerCropRegion[3]);
7009
7010 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7011 }
7012
7013 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7014 LOGD("sensorExpTime = %lld", *sensorExpTime);
7015 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7016 }
7017
7018 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7019 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7020 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7021 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7022 }
7023
7024 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7025 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7026 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7027 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7028 sensorRollingShutterSkew, 1);
7029 }
7030
7031 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7032 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7033 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7034
7035 //calculate the noise profile based on sensitivity
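        // Per the Camera2 metadata definition, ANDROID_SENSOR_NOISE_PROFILE models the
        // raw-signal variance as approximately S * signal + O, so the same (S, O) pair
        // computed from the current sensitivity is reported for every color channel below.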
7036 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7037 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7038 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7039 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7040 noise_profile[i] = noise_profile_S;
7041 noise_profile[i+1] = noise_profile_O;
7042 }
7043 LOGD("noise model entry (S, O) is (%f, %f)",
7044 noise_profile_S, noise_profile_O);
7045 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7046 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7047 }
7048
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007049#ifndef USE_HAL_3_3
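    // Report the total post-RAW digital gain in ISO arithmetic units (100 = no boost):
    // start from unity and, when available, scale the ISP sensitivity by the
    // post-stats sensitivity factor.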
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007050 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007051 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007052 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007053 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007054 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7055 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7056 }
7057 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007058#endif
7059
Thierry Strudel3d639192016-09-09 11:52:26 -07007060 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7061 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7062 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7063 }
7064
7065 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7066 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7067 *faceDetectMode);
7068 if (NAME_NOT_FOUND != val) {
7069 uint8_t fwk_faceDetectMode = (uint8_t)val;
7070 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7071
7072 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7073 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7074 CAM_INTF_META_FACE_DETECTION, metadata) {
7075 uint8_t numFaces = MIN(
7076 faceDetectionInfo->num_faces_detected, MAX_ROI);
7077 int32_t faceIds[MAX_ROI];
7078 uint8_t faceScores[MAX_ROI];
7079 int32_t faceRectangles[MAX_ROI * 4];
7080 int32_t faceLandmarks[MAX_ROI * 6];
7081 size_t j = 0, k = 0;
7082
7083 for (size_t i = 0; i < numFaces; i++) {
7084 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7085 // Adjust crop region from sensor output coordinate system to active
7086 // array coordinate system.
7087 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7088 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7089 rect.width, rect.height);
7090
7091 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7092 faceRectangles+j, -1);
7093
Jason Lee8ce36fa2017-04-19 19:40:37 -07007094 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7095 "bottom-right (%d, %d)",
7096 faceDetectionInfo->frame_id, i,
7097 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7098 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7099
Thierry Strudel3d639192016-09-09 11:52:26 -07007100 j+= 4;
7101 }
7102 if (numFaces <= 0) {
7103 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7104 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7105 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7106 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7107 }
7108
7109 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7110 numFaces);
7111 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7112 faceRectangles, numFaces * 4U);
7113 if (fwk_faceDetectMode ==
7114 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7115 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7116 CAM_INTF_META_FACE_LANDMARK, metadata) {
7117
7118 for (size_t i = 0; i < numFaces; i++) {
7119 // Map the co-ordinate sensor output coordinate system to active
7120 // array coordinate system.
7121 mCropRegionMapper.toActiveArray(
7122 landmarks->face_landmarks[i].left_eye_center.x,
7123 landmarks->face_landmarks[i].left_eye_center.y);
7124 mCropRegionMapper.toActiveArray(
7125 landmarks->face_landmarks[i].right_eye_center.x,
7126 landmarks->face_landmarks[i].right_eye_center.y);
7127 mCropRegionMapper.toActiveArray(
7128 landmarks->face_landmarks[i].mouth_center.x,
7129 landmarks->face_landmarks[i].mouth_center.y);
7130
7131 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007132
7133 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7134 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7135 faceDetectionInfo->frame_id, i,
7136 faceLandmarks[k + LEFT_EYE_X],
7137 faceLandmarks[k + LEFT_EYE_Y],
7138 faceLandmarks[k + RIGHT_EYE_X],
7139 faceLandmarks[k + RIGHT_EYE_Y],
7140 faceLandmarks[k + MOUTH_X],
7141 faceLandmarks[k + MOUTH_Y]);
7142
Thierry Strudel04e026f2016-10-10 11:27:36 -07007143 k+= TOTAL_LANDMARK_INDICES;
7144 }
7145 } else {
7146 for (size_t i = 0; i < numFaces; i++) {
7147 setInvalidLandmarks(faceLandmarks+k);
7148 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007149 }
7150 }
7151
Jason Lee49619db2017-04-13 12:07:22 -07007152 for (size_t i = 0; i < numFaces; i++) {
7153 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7154
7155 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7156 faceDetectionInfo->frame_id, i, faceIds[i]);
7157 }
7158
Thierry Strudel3d639192016-09-09 11:52:26 -07007159 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7160 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7161 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007162 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007163 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7164 CAM_INTF_META_FACE_BLINK, metadata) {
7165 uint8_t detected[MAX_ROI];
7166 uint8_t degree[MAX_ROI * 2];
7167 for (size_t i = 0; i < numFaces; i++) {
7168 detected[i] = blinks->blink[i].blink_detected;
7169 degree[2 * i] = blinks->blink[i].left_blink;
7170 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007171
Jason Lee49619db2017-04-13 12:07:22 -07007172 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7173 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7174 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7175 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007176 }
7177 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7178 detected, numFaces);
7179 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7180 degree, numFaces * 2);
7181 }
7182 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7183 CAM_INTF_META_FACE_SMILE, metadata) {
7184 uint8_t degree[MAX_ROI];
7185 uint8_t confidence[MAX_ROI];
7186 for (size_t i = 0; i < numFaces; i++) {
7187 degree[i] = smiles->smile[i].smile_degree;
7188 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007189
Jason Lee49619db2017-04-13 12:07:22 -07007190 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7191 "smile_degree=%d, smile_score=%d",
7192 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007193 }
7194 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7195 degree, numFaces);
7196 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7197 confidence, numFaces);
7198 }
7199 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7200 CAM_INTF_META_FACE_GAZE, metadata) {
7201 int8_t angle[MAX_ROI];
7202 int32_t direction[MAX_ROI * 3];
7203 int8_t degree[MAX_ROI * 2];
7204 for (size_t i = 0; i < numFaces; i++) {
7205 angle[i] = gazes->gaze[i].gaze_angle;
7206 direction[3 * i] = gazes->gaze[i].updown_dir;
7207 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7208 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7209 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7210 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007211
7212 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7213 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7214 "left_right_gaze=%d, top_bottom_gaze=%d",
7215 faceDetectionInfo->frame_id, i, angle[i],
7216 direction[3 * i], direction[3 * i + 1],
7217 direction[3 * i + 2],
7218 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007219 }
7220 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7221 (uint8_t *)angle, numFaces);
7222 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7223 direction, numFaces * 3);
7224 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7225 (uint8_t *)degree, numFaces * 2);
7226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007227 }
7228 }
7229 }
7230 }
7231
7232 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7233 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007234 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007235 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007236 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007237
Shuzhen Wang14415f52016-11-16 18:26:18 -08007238 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7239 histogramBins = *histBins;
7240 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7241 }
7242
7243 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007244 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7245 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007246 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007247
7248 switch (stats_data->type) {
7249 case CAM_HISTOGRAM_TYPE_BAYER:
7250 switch (stats_data->bayer_stats.data_type) {
7251 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007252 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7253 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007254 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007255 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7256 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007257 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007258 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7259 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007260 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007261 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007262 case CAM_STATS_CHANNEL_R:
7263 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007264 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7265 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007266 }
7267 break;
7268 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007269 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007270 break;
7271 }
7272
Shuzhen Wang14415f52016-11-16 18:26:18 -08007273 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007274 }
7275 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007276 }
7277
7278 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7279 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7280 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7281 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7282 }
7283
7284 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7285 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7286 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7287 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7288 }
7289
7290 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7291 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
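        // The lens shading map carries four gain samples (R, G_even, G_odd, B) per grid
        // cell, hence 4 * map_width * map_height entries are published.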
7292 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7293 CAM_MAX_SHADING_MAP_HEIGHT);
7294 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7295 CAM_MAX_SHADING_MAP_WIDTH);
7296 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7297 lensShadingMap->lens_shading, 4U * map_width * map_height);
7298 }
7299
7300 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7301 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7302 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7303 }
7304
7305 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7306 //Populate CAM_INTF_META_TONEMAP_CURVES
7307 /* ch0 = G, ch 1 = B, ch 2 = R*/
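        // Each tonemap control point is a (Pin, Pout) pair, so every channel publishes
        // tonemap_points_cnt * 2 floats.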
7308 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7309 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7310 tonemap->tonemap_points_cnt,
7311 CAM_MAX_TONEMAP_CURVE_SIZE);
7312 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7313 }
7314
7315 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7316 &tonemap->curves[0].tonemap_points[0][0],
7317 tonemap->tonemap_points_cnt * 2);
7318
7319 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7320 &tonemap->curves[1].tonemap_points[0][0],
7321 tonemap->tonemap_points_cnt * 2);
7322
7323 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7324 &tonemap->curves[2].tonemap_points[0][0],
7325 tonemap->tonemap_points_cnt * 2);
7326 }
7327
7328 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7329 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7330 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7331 CC_GAIN_MAX);
7332 }
7333
7334 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7335 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7336 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7337 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7338 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7339 }
7340
7341 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7342 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7343 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7344 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7345 toneCurve->tonemap_points_cnt,
7346 CAM_MAX_TONEMAP_CURVE_SIZE);
7347 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7348 }
7349 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7350 (float*)toneCurve->curve.tonemap_points,
7351 toneCurve->tonemap_points_cnt * 2);
7352 }
7353
7354 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7355 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7356 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7357 predColorCorrectionGains->gains, 4);
7358 }
7359
7360 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7361 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7362 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7363 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7364 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7365 }
7366
7367 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7368 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7369 }
7370
7371 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7372 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7373 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7374 }
7375
7376 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7377 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7378 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7379 }
7380
7381 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7382 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7383 *effectMode);
7384 if (NAME_NOT_FOUND != val) {
7385 uint8_t fwk_effectMode = (uint8_t)val;
7386 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7387 }
7388 }
7389
7390 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7391 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7392 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7393 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7394 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7395 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7396 }
7397 int32_t fwk_testPatternData[4];
7398 fwk_testPatternData[0] = testPatternData->r;
7399 fwk_testPatternData[3] = testPatternData->b;
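        // The framework expects the four values in [R, G_even, G_odd, B] order, so map
        // the HAL's Gr/Gb samples according to the sensor's CFA arrangement below.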
7400 switch (gCamCapability[mCameraId]->color_arrangement) {
7401 case CAM_FILTER_ARRANGEMENT_RGGB:
7402 case CAM_FILTER_ARRANGEMENT_GRBG:
7403 fwk_testPatternData[1] = testPatternData->gr;
7404 fwk_testPatternData[2] = testPatternData->gb;
7405 break;
7406 case CAM_FILTER_ARRANGEMENT_GBRG:
7407 case CAM_FILTER_ARRANGEMENT_BGGR:
7408 fwk_testPatternData[2] = testPatternData->gr;
7409 fwk_testPatternData[1] = testPatternData->gb;
7410 break;
7411 default:
7412 LOGE("color arrangement %d is not supported",
7413 gCamCapability[mCameraId]->color_arrangement);
7414 break;
7415 }
7416 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7417 }
7418
7419 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7420 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7421 }
7422
7423 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7424 String8 str((const char *)gps_methods);
7425 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7426 }
7427
7428 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7429 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7430 }
7431
7432 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7433 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7434 }
7435
7436 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7437 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7438 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7439 }
7440
7441 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7442 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7443 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7444 }
7445
7446 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7447 int32_t fwk_thumb_size[2];
7448 fwk_thumb_size[0] = thumb_size->width;
7449 fwk_thumb_size[1] = thumb_size->height;
7450 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7451 }
7452
7453 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7454 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7455 privateData,
7456 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7457 }
7458
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007459 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007460 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007461 meteringMode, 1);
7462 }
7463
Thierry Strudel54dc9782017-02-15 12:12:10 -08007464 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7465 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7466 LOGD("hdr_scene_data: %d %f\n",
7467 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7468 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7469 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7470 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7471 &isHdr, 1);
7472 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7473 &isHdrConfidence, 1);
7474 }
7475
7476
7477
Thierry Strudel3d639192016-09-09 11:52:26 -07007478 if (metadata->is_tuning_params_valid) {
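        // Pack the tuning blob as six uint32_t header fields (data version followed by the
        // sensor, VFE, CPP, CAC and mod3 segment sizes) and then the sensor, VFE, CPP and
        // CAC data segments, each clamped to its TUNING_*_DATA_MAX limit.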
7479 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7480 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7481 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7482
7483
7484 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7485 sizeof(uint32_t));
7486 data += sizeof(uint32_t);
7487
7488 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7489 sizeof(uint32_t));
7490 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7491 data += sizeof(uint32_t);
7492
7493 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7494 sizeof(uint32_t));
7495 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7496 data += sizeof(uint32_t);
7497
7498 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7499 sizeof(uint32_t));
7500 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7501 data += sizeof(uint32_t);
7502
7503 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7504 sizeof(uint32_t));
7505 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7506 data += sizeof(uint32_t);
7507
7508 metadata->tuning_params.tuning_mod3_data_size = 0;
7509 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7510 sizeof(uint32_t));
7511 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7512 data += sizeof(uint32_t);
7513
7514 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7515 TUNING_SENSOR_DATA_MAX);
7516 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7517 count);
7518 data += count;
7519
7520 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7521 TUNING_VFE_DATA_MAX);
7522 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7523 count);
7524 data += count;
7525
7526 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7527 TUNING_CPP_DATA_MAX);
7528 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7529 count);
7530 data += count;
7531
7532 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7533 TUNING_CAC_DATA_MAX);
7534 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7535 count);
7536 data += count;
7537
7538 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7539 (int32_t *)(void *)tuning_meta_data_blob,
7540 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7541 }
7542
7543 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7544 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7545 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7546 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7547 NEUTRAL_COL_POINTS);
7548 }
7549
7550 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7551 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7552 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7553 }
7554
7555 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7556 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7557 // Adjust crop region from sensor output coordinate system to active
7558 // array coordinate system.
7559 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7560 hAeRegions->rect.width, hAeRegions->rect.height);
7561
7562 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7563 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7564 REGIONS_TUPLE_COUNT);
7565 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7566 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7567 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7568 hAeRegions->rect.height);
7569 }
7570
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007571 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7572 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7573 if (NAME_NOT_FOUND != val) {
7574 uint8_t fwkAfMode = (uint8_t)val;
7575 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7576 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7577 } else {
7578 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7579 val);
7580 }
7581 }
7582
Thierry Strudel3d639192016-09-09 11:52:26 -07007583 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7584 uint8_t fwk_afState = (uint8_t) *afState;
7585 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007586 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007587 }
7588
7589 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7590 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7591 }
7592
7593 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7594 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7595 }
7596
7597 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7598 uint8_t fwk_lensState = *lensState;
7599 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7600 }
7601
Thierry Strudel3d639192016-09-09 11:52:26 -07007602
7603 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007604 uint32_t ab_mode = *hal_ab_mode;
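        // The framework only defines a generic AUTO antibanding mode, so fold the HAL's
        // 50Hz/60Hz auto variants back to AUTO before the lookup.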
7605 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7606 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7607 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7608 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007609 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007610 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007611 if (NAME_NOT_FOUND != val) {
7612 uint8_t fwk_ab_mode = (uint8_t)val;
7613 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7614 }
7615 }
7616
7617 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7618 int val = lookupFwkName(SCENE_MODES_MAP,
7619 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7620 if (NAME_NOT_FOUND != val) {
7621 uint8_t fwkBestshotMode = (uint8_t)val;
7622 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7623 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7624 } else {
7625 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7626 }
7627 }
7628
7629 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7630 uint8_t fwk_mode = (uint8_t) *mode;
7631 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7632 }
7633
7634 /* Constant metadata values to be updated */
7635 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7636 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7637
7638 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7639 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7640
7641 int32_t hotPixelMap[2];
7642 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7643
7644 // CDS
7645 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7646 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7647 }
7648
Thierry Strudel04e026f2016-10-10 11:27:36 -07007649 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7650 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007651 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007652 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7653 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7654 } else {
7655 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7656 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007657
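        // Track HDR on/off transitions in mCurrFeatureState so the toggle is logged only
        // when the mode actually changes.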
7658 if(fwk_hdr != curr_hdr_state) {
7659 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7660 if(fwk_hdr)
7661 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7662 else
7663 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7664 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007665 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7666 }
7667
Thierry Strudel54dc9782017-02-15 12:12:10 -08007668 //binning correction
7669 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7670 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7671 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7672 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7673 }
7674
Thierry Strudel04e026f2016-10-10 11:27:36 -07007675 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007676 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007677 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7678 int8_t is_ir_on = 0;
7679
7680 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7681 if(is_ir_on != curr_ir_state) {
7682 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7683 if(is_ir_on)
7684 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7685 else
7686 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7687 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007688 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007689 }
7690
Thierry Strudel269c81a2016-10-12 12:13:59 -07007691 // AEC SPEED
7692 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7693 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7694 }
7695
7696 // AWB SPEED
7697 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7698 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7699 }
7700
Thierry Strudel3d639192016-09-09 11:52:26 -07007701 // TNR
7702 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7703 uint8_t tnr_enable = tnr->denoise_enable;
7704 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007705 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7706 int8_t is_tnr_on = 0;
7707
7708 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7709 if(is_tnr_on != curr_tnr_state) {
7710 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7711 if(is_tnr_on)
7712 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7713 else
7714 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7715 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007716
7717 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7718 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7719 }
7720
7721 // Reprocess crop data
7722 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7723 uint8_t cnt = crop_data->num_of_streams;
7724 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7725 // mm-qcamera-daemon only posts crop_data for streams
7726 // not linked to pproc, so the absence of valid crop metadata is not
7727 // necessarily an error case.
7728 LOGD("No valid crop metadata entries");
7729 } else {
7730 uint32_t reproc_stream_id;
7731 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7732 LOGD("No reprocessible stream found, ignore crop data");
7733 } else {
7734 int rc = NO_ERROR;
7735 Vector<int32_t> roi_map;
7736 int32_t *crop = new int32_t[cnt*4];
7737 if (NULL == crop) {
7738 rc = NO_MEMORY;
7739 }
7740 if (NO_ERROR == rc) {
7741 int32_t streams_found = 0;
7742 for (size_t i = 0; i < cnt; i++) {
7743 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7744 if (pprocDone) {
7745 // HAL already does internal reprocessing,
7746 // either via reprocessing before JPEG encoding,
7747 // or offline postprocessing for pproc bypass case.
7748 crop[0] = 0;
7749 crop[1] = 0;
7750 crop[2] = mInputStreamInfo.dim.width;
7751 crop[3] = mInputStreamInfo.dim.height;
7752 } else {
7753 crop[0] = crop_data->crop_info[i].crop.left;
7754 crop[1] = crop_data->crop_info[i].crop.top;
7755 crop[2] = crop_data->crop_info[i].crop.width;
7756 crop[3] = crop_data->crop_info[i].crop.height;
7757 }
7758 roi_map.add(crop_data->crop_info[i].roi_map.left);
7759 roi_map.add(crop_data->crop_info[i].roi_map.top);
7760 roi_map.add(crop_data->crop_info[i].roi_map.width);
7761 roi_map.add(crop_data->crop_info[i].roi_map.height);
7762 streams_found++;
7763 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7764 crop[0], crop[1], crop[2], crop[3]);
7765 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7766 crop_data->crop_info[i].roi_map.left,
7767 crop_data->crop_info[i].roi_map.top,
7768 crop_data->crop_info[i].roi_map.width,
7769 crop_data->crop_info[i].roi_map.height);
7770 break;
7771
7772 }
7773 }
7774 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7775 &streams_found, 1);
7776 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7777 crop, (size_t)(streams_found * 4));
7778 if (roi_map.array()) {
7779 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7780 roi_map.array(), roi_map.size());
7781 }
7782 }
7783 if (crop) {
7784 delete [] crop;
7785 }
7786 }
7787 }
7788 }
7789
7790 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7791 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7792 // so hardcode the CAC result to OFF mode.
7793 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7794 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7795 } else {
7796 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7797 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7798 *cacMode);
7799 if (NAME_NOT_FOUND != val) {
7800 uint8_t resultCacMode = (uint8_t)val;
7801 // Check whether the CAC result from the callback equals the framework-set CAC mode.
7802 // If not, report the CAC mode that came in the corresponding request.
7803 if (fwk_cacMode != resultCacMode) {
7804 resultCacMode = fwk_cacMode;
7805 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007806 //Check if CAC is disabled by property
7807 if (m_cacModeDisabled) {
7808 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7809 }
7810
Thierry Strudel3d639192016-09-09 11:52:26 -07007811 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7812 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7813 } else {
7814 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7815 }
7816 }
7817 }
7818
7819 // Post blob of cam_cds_data through vendor tag.
7820 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7821 uint8_t cnt = cdsInfo->num_of_streams;
7822 cam_cds_data_t cdsDataOverride;
7823 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7824 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7825 cdsDataOverride.num_of_streams = 1;
7826 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7827 uint32_t reproc_stream_id;
7828 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7829 LOGD("No reprocessible stream found, ignore cds data");
7830 } else {
7831 for (size_t i = 0; i < cnt; i++) {
7832 if (cdsInfo->cds_info[i].stream_id ==
7833 reproc_stream_id) {
7834 cdsDataOverride.cds_info[0].cds_enable =
7835 cdsInfo->cds_info[i].cds_enable;
7836 break;
7837 }
7838 }
7839 }
7840 } else {
7841 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7842 }
7843 camMetadata.update(QCAMERA3_CDS_INFO,
7844 (uint8_t *)&cdsDataOverride,
7845 sizeof(cam_cds_data_t));
7846 }
7847
7848 // Ldaf calibration data
7849 if (!mLdafCalibExist) {
7850 IF_META_AVAILABLE(uint32_t, ldafCalib,
7851 CAM_INTF_META_LDAF_EXIF, metadata) {
7852 mLdafCalibExist = true;
7853 mLdafCalib[0] = ldafCalib[0];
7854 mLdafCalib[1] = ldafCalib[1];
7855 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7856 ldafCalib[0], ldafCalib[1]);
7857 }
7858 }
7859
Thierry Strudel54dc9782017-02-15 12:12:10 -08007860 // EXIF debug data through vendor tag
7861 /*
7862 * Mobicat Mask can assume 3 values:
7863 * 1 refers to Mobicat data,
7864 * 2 refers to Stats Debug and Exif Debug Data
7865 * 3 refers to Mobicat and Stats Debug Data
7866 * We want to make sure that we are sending Exif debug data
7867 * only when Mobicat Mask is 2.
7868 */
7869 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7870 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7871 (uint8_t *)(void *)mExifParams.debug_params,
7872 sizeof(mm_jpeg_debug_exif_params_t));
7873 }
7874
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007875 // Reprocess and DDM debug data through vendor tag
7876 cam_reprocess_info_t repro_info;
7877 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007878 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7879 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007880 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 }
7882 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7883 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007884 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007885 }
7886 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7887 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007888 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007889 }
7890 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7891 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007892 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007893 }
7894 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7895 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007896 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007897 }
7898 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007899 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007900 }
7901 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7902 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007903 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007904 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007905 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7906 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7907 }
7908 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7909 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7910 }
7911 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7912 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007914 // INSTANT AEC MODE
7915 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7916 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7917 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7918 }
7919
Shuzhen Wange763e802016-03-31 10:24:29 -07007920 // AF scene change
7921 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7922 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7923 }
7924
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007925 // Enable ZSL
7926 if (enableZsl != nullptr) {
7927 uint8_t value = *enableZsl ?
7928 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7929 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7930 }
7931
Xu Han821ea9c2017-05-23 09:00:40 -07007932 // OIS Data
7933 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
7934 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
7935 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
7936 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
7937 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
7938 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
7939 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
7940 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
7941 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
7942 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
7943 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
7944 }
7945
Thierry Strudel3d639192016-09-09 11:52:26 -07007946 resultMetadata = camMetadata.release();
7947 return resultMetadata;
7948}
7949
7950/*===========================================================================
7951 * FUNCTION : saveExifParams
7952 *
7953 * DESCRIPTION: Save 3A and stats EXIF debug parameters from the metadata callback into mExifParams
7954 *
7955 * PARAMETERS :
7956 * @metadata : metadata information from callback
7957 *
7958 * RETURN : none
7959 *
7960 *==========================================================================*/
7961void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7962{
7963 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7964 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7965 if (mExifParams.debug_params) {
7966 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7967 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7968 }
7969 }
7970 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7971 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7972 if (mExifParams.debug_params) {
7973 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7974 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7975 }
7976 }
7977 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7978 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7979 if (mExifParams.debug_params) {
7980 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7981 mExifParams.debug_params->af_debug_params_valid = TRUE;
7982 }
7983 }
7984 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7985 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7986 if (mExifParams.debug_params) {
7987 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7988 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7989 }
7990 }
7991 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7992 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7993 if (mExifParams.debug_params) {
7994 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7995 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7996 }
7997 }
7998 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7999 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8000 if (mExifParams.debug_params) {
8001 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8002 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8003 }
8004 }
8005 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8006 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8007 if (mExifParams.debug_params) {
8008 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8009 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8010 }
8011 }
8012 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8013 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8014 if (mExifParams.debug_params) {
8015 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8016 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8017 }
8018 }
8019}
8020
8021/*===========================================================================
8022 * FUNCTION : get3AExifParams
8023 *
8024 * DESCRIPTION: Return the EXIF parameters, including the 3A debug data cached by saveExifParams()
8025 *
8026 * PARAMETERS : none
8027 *
8028 *
8029 * RETURN : mm_jpeg_exif_params_t
8030 *
8031 *==========================================================================*/
8032mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8033{
8034 return mExifParams;
8035}
8036
8037/*===========================================================================
8038 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8039 *
8040 * DESCRIPTION: Translate urgent (partial result) metadata from the backend callback into framework result metadata
8041 *
8042 * PARAMETERS :
8043 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008044 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8045 * urgent metadata in a batch. Always true for
8046 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008047 *
8048 * RETURN : camera_metadata_t*
8049 * metadata in a format specified by fwk
8050 *==========================================================================*/
8051camera_metadata_t*
8052QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008053 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008054{
8055 CameraMetadata camMetadata;
8056 camera_metadata_t *resultMetadata;
8057
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008058 if (!lastUrgentMetadataInBatch) {
8059 /* In batch mode, use empty metadata if this is not the last in batch
8060 */
8061 resultMetadata = allocate_camera_metadata(0, 0);
8062 return resultMetadata;
8063 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008064
8065 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8066 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8067 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8068 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8069 }
8070
8071 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8072 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8073 &aecTrigger->trigger, 1);
8074 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8075 &aecTrigger->trigger_id, 1);
8076 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8077 aecTrigger->trigger);
8078 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8079 aecTrigger->trigger_id);
8080 }
8081
8082 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8083 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8084 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8085 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8086 }
8087
Thierry Strudel3d639192016-09-09 11:52:26 -07008088 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8089 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8090 &af_trigger->trigger, 1);
8091 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8092 af_trigger->trigger);
8093 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8094 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8095 af_trigger->trigger_id);
8096 }
8097
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008098 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8099 /*af regions*/
8100 int32_t afRegions[REGIONS_TUPLE_COUNT];
8101 // Adjust AF regions from sensor output coordinate system to active
8102 // array coordinate system.
8103 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8104 hAfRegions->rect.width, hAfRegions->rect.height);
8105
8106 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8107 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8108 REGIONS_TUPLE_COUNT);
8109 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8110 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8111 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8112 hAfRegions->rect.height);
8113 }
8114
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008115 // AF region confidence
8116 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8117 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8118 }
8119
Thierry Strudel3d639192016-09-09 11:52:26 -07008120 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8121 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8122 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8123 if (NAME_NOT_FOUND != val) {
8124 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8125 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8126 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8127 } else {
8128 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8129 }
8130 }
8131
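// Deduce ANDROID_CONTROL_AE_MODE from the backend hints gathered below, in
// order of precedence: red-eye reduction first, then an explicit AUTO/ON
// flash mode, then the plain AEC on/off state (with external flash reported
// through a vendor-specific AE mode).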
8132 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8133 uint32_t aeMode = CAM_AE_MODE_MAX;
8134 int32_t flashMode = CAM_FLASH_MODE_MAX;
8135 int32_t redeye = -1;
8136 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8137 aeMode = *pAeMode;
8138 }
8139 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8140 flashMode = *pFlashMode;
8141 }
8142 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8143 redeye = *pRedeye;
8144 }
8145
8146 if (1 == redeye) {
8147 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8148 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8149 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8150 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8151 flashMode);
8152 if (NAME_NOT_FOUND != val) {
8153 fwk_aeMode = (uint8_t)val;
8154 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8155 } else {
8156 LOGE("Unsupported flash mode %d", flashMode);
8157 }
8158 } else if (aeMode == CAM_AE_MODE_ON) {
8159 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8160 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8161 } else if (aeMode == CAM_AE_MODE_OFF) {
8162 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8163 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008164 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8165 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8166 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008167 } else {
8168 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8169 "flashMode:%d, aeMode:%u!!!",
8170 redeye, flashMode, aeMode);
8171 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008172 if (mInstantAEC) {
8173 // Increment frame index count until a bound is reached for instant AEC.
8174 mInstantAecFrameIdxCount++;
8175 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8176 CAM_INTF_META_AEC_INFO, metadata) {
8177 LOGH("ae_params->settled = %d",ae_params->settled);
8178 // If AEC has settled, or the number of frames has reached the bound,
8179 // reset instant AEC.
8180 if (ae_params->settled ||
8181 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8182 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8183 mInstantAEC = false;
8184 mResetInstantAEC = true;
8185 mInstantAecFrameIdxCount = 0;
8186 }
8187 }
8188 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008189 resultMetadata = camMetadata.release();
8190 return resultMetadata;
8191}
8192
8193/*===========================================================================
8194 * FUNCTION : dumpMetadataToFile
8195 *
8196 * DESCRIPTION: Dumps tuning metadata to file system
8197 *
8198 * PARAMETERS :
8199 * @meta : tuning metadata
8200 * @dumpFrameCount : current dump frame count
8201 * @enabled : whether dumping is enabled
8202 * @type, @frameNumber : dump type string and frame number used in the dump file name
8203 *==========================================================================*/
8204void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8205 uint32_t &dumpFrameCount,
8206 bool enabled,
8207 const char *type,
8208 uint32_t frameNumber)
8209{
8210 //Some sanity checks
8211 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8212 LOGE("Tuning sensor data size bigger than expected %d: %d",
8213 meta.tuning_sensor_data_size,
8214 TUNING_SENSOR_DATA_MAX);
8215 return;
8216 }
8217
8218 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8219 LOGE("Tuning VFE data size bigger than expected %d: %d",
8220 meta.tuning_vfe_data_size,
8221 TUNING_VFE_DATA_MAX);
8222 return;
8223 }
8224
8225 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8226 LOGE("Tuning CPP data size bigger than expected %d: %d",
8227 meta.tuning_cpp_data_size,
8228 TUNING_CPP_DATA_MAX);
8229 return;
8230 }
8231
8232 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8233 LOGE("Tuning CAC data size bigger than expected %d: %d",
8234 meta.tuning_cac_data_size,
8235 TUNING_CAC_DATA_MAX);
8236 return;
8237 }
8238 //
8239
8240 if(enabled){
8241 char timeBuf[FILENAME_MAX];
8242 char buf[FILENAME_MAX];
8243 memset(buf, 0, sizeof(buf));
8244 memset(timeBuf, 0, sizeof(timeBuf));
8245 time_t current_time;
8246 struct tm * timeinfo;
8247 time (&current_time);
8248 timeinfo = localtime (&current_time);
8249 if (timeinfo != NULL) {
8250 strftime (timeBuf, sizeof(timeBuf),
8251 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8252 }
8253 String8 filePath(timeBuf);
8254 snprintf(buf,
8255 sizeof(buf),
8256 "%dm_%s_%d.bin",
8257 dumpFrameCount,
8258 type,
8259 frameNumber);
8260 filePath.append(buf);
8261 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8262 if (file_fd >= 0) {
8263 ssize_t written_len = 0;
8264 meta.tuning_data_version = TUNING_DATA_VERSION;
8265 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8266 written_len += write(file_fd, data, sizeof(uint32_t));
8267 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8268 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8269 written_len += write(file_fd, data, sizeof(uint32_t));
8270 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8271 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8272 written_len += write(file_fd, data, sizeof(uint32_t));
8273 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8274 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8275 written_len += write(file_fd, data, sizeof(uint32_t));
8276 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8277 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8278 written_len += write(file_fd, data, sizeof(uint32_t));
8279 meta.tuning_mod3_data_size = 0;
8280 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8281 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8282 written_len += write(file_fd, data, sizeof(uint32_t));
8283 size_t total_size = meta.tuning_sensor_data_size;
8284 data = (void *)((uint8_t *)&meta.data);
8285 written_len += write(file_fd, data, total_size);
8286 total_size = meta.tuning_vfe_data_size;
8287 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8288 written_len += write(file_fd, data, total_size);
8289 total_size = meta.tuning_cpp_data_size;
8290 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8291 written_len += write(file_fd, data, total_size);
8292 total_size = meta.tuning_cac_data_size;
8293 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8294 written_len += write(file_fd, data, total_size);
8295 close(file_fd);
8296 }else {
8297 LOGE("fail to open file for metadata dumping");
8298 }
8299 }
8300}
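/*
 * Illustrative sketch (not part of the HAL source): the tuning dump written
 * above begins with six uint32_t header words -- data version, then the
 * sensor, VFE, CPP, CAC and mod3 section sizes -- followed by the sensor,
 * VFE, CPP and CAC sections back-to-back. A host-side reader could recover
 * the header like this (the file name and struct name are hypothetical):
 *
 *   #include <cstdint>
 *   #include <cstdio>
 *
 *   struct TuningDumpHeader {
 *       uint32_t version;
 *       uint32_t sensorSize;
 *       uint32_t vfeSize;
 *       uint32_t cppSize;
 *       uint32_t cacSize;
 *       uint32_t mod3Size;
 *   };
 *
 *   FILE *fp = fopen("0m_meta_1.bin", "rb");
 *   TuningDumpHeader hdr;
 *   if ((fp != NULL) && (fread(&hdr, sizeof(hdr), 1, fp) == 1)) {
 *       // hdr.sensorSize bytes of sensor data follow, then hdr.vfeSize
 *       // bytes of VFE data, and so on.
 *   }
 *   if (fp != NULL) {
 *       fclose(fp);
 *   }
 */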
8301
8302/*===========================================================================
8303 * FUNCTION : cleanAndSortStreamInfo
8304 *
8305 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8306 * and sort them such that the raw stream is at the end of the list.
8307 * This is a workaround for a camera daemon constraint.
8308 *
8309 * PARAMETERS : None
8310 *
8311 *==========================================================================*/
8312void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8313{
8314 List<stream_info_t *> newStreamInfo;
8315
8316 /*clean up invalid streams*/
8317 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8318 it != mStreamInfo.end();) {
8319 if(((*it)->status) == INVALID){
8320 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8321 delete channel;
8322 free(*it);
8323 it = mStreamInfo.erase(it);
8324 } else {
8325 it++;
8326 }
8327 }
8328
8329 // Move preview/video/callback/snapshot streams into newList
8330 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8331 it != mStreamInfo.end();) {
8332 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8333 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8334 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8335 newStreamInfo.push_back(*it);
8336 it = mStreamInfo.erase(it);
8337 } else
8338 it++;
8339 }
8340 // Move raw streams into newList
8341 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8342 it != mStreamInfo.end();) {
8343 newStreamInfo.push_back(*it);
8344 it = mStreamInfo.erase(it);
8345 }
8346
8347 mStreamInfo = newStreamInfo;
8348}
8349
8350/*===========================================================================
8351 * FUNCTION : extractJpegMetadata
8352 *
8353 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8354 * JPEG metadata is cached in HAL, and return as part of capture
8355 * result when metadata is returned from camera daemon.
8356 *
8357 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8358 * @request: capture request
8359 *
8360 *==========================================================================*/
8361void QCamera3HardwareInterface::extractJpegMetadata(
8362 CameraMetadata& jpegMetadata,
8363 const camera3_capture_request_t *request)
8364{
8365 CameraMetadata frame_settings;
8366 frame_settings = request->settings;
8367
8368 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8369 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8370 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8371 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8372
8373 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8374 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8375 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8376 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8377
8378 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8379 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8380 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8381 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8382
8383 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8384 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8385 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8386 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8387
8388 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8389 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8390 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8391 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8392
8393 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8394 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8395 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8396 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8397
8398 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8399 int32_t thumbnail_size[2];
8400 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8401 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8402 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8403 int32_t orientation =
8404 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008405 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008406 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8407 int32_t temp;
8408 temp = thumbnail_size[0];
8409 thumbnail_size[0] = thumbnail_size[1];
8410 thumbnail_size[1] = temp;
8411 }
8412 }
8413 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8414 thumbnail_size,
8415 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8416 }
8417
8418}
8419
8420/*===========================================================================
8421 * FUNCTION : convertToRegions
8422 *
8423 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8424 *
8425 * PARAMETERS :
8426 * @rect : cam_rect_t struct to convert
8427 * @region : int32_t destination array
8428 * @weight : if we are converting from cam_area_t, weight is valid
8429 * else weight = -1
8430 *
8431 *==========================================================================*/
8432void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8433 int32_t *region, int weight)
8434{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008435 region[FACE_LEFT] = rect.left;
8436 region[FACE_TOP] = rect.top;
8437 region[FACE_RIGHT] = rect.left + rect.width;
8438 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008439 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008440 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008441 }
8442}
8443
8444/*===========================================================================
8445 * FUNCTION : convertFromRegions
8446 *
8447 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
8448 *
8449 * PARAMETERS :
8450 *   @roi : cam_area_t destination struct
8451 *   @frame_settings : capture request settings containing the region entry
8452 *   @tag : metadata tag whose data is laid out as
8453 *          [x_min, y_min, x_max, y_max, weight]
8454 *
8455 *==========================================================================*/
8456void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008457 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008458{
Thierry Strudel3d639192016-09-09 11:52:26 -07008459 int32_t x_min = frame_settings.find(tag).data.i32[0];
8460 int32_t y_min = frame_settings.find(tag).data.i32[1];
8461 int32_t x_max = frame_settings.find(tag).data.i32[2];
8462 int32_t y_max = frame_settings.find(tag).data.i32[3];
8463 roi.weight = frame_settings.find(tag).data.i32[4];
8464 roi.rect.left = x_min;
8465 roi.rect.top = y_min;
8466 roi.rect.width = x_max - x_min;
8467 roi.rect.height = y_max - y_min;
8468}
8469
8470/*===========================================================================
8471 * FUNCTION : resetIfNeededROI
8472 *
8473 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8474 * crop region
8475 *
8476 * PARAMETERS :
8477 * @roi : cam_area_t struct to resize
8478 * @scalerCropRegion : cam_crop_region_t region to compare against
8479 *
8480 *
8481 *==========================================================================*/
8482bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8483 const cam_crop_region_t* scalerCropRegion)
8484{
8485 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8486 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8487 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8488 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8489
8490 /* According to the spec, weight = 0 indicates that the roi should be disabled.
8491 * Without this check, the validation below (whether the roi lies inside the
8492 * scaler crop region) would fail, the roi would not be reset, and the
8493 * algorithm would continue to use a stale roi window.
8494 */
8495 if (roi->weight == 0) {
8496 return true;
8497 }
8498
8499 if ((roi_x_max < scalerCropRegion->left) ||
8500 // right edge of roi window is left of scaler crop's left edge
8501 (roi_y_max < scalerCropRegion->top) ||
8502 // bottom edge of roi window is above scaler crop's top edge
8503 (roi->rect.left > crop_x_max) ||
8504 // left edge of roi window is right of scaler crop's right edge
8505 (roi->rect.top > crop_y_max)){
8506 // top edge of roi window is below scaler crop's bottom edge
8507 return false;
8508 }
8509 if (roi->rect.left < scalerCropRegion->left) {
8510 roi->rect.left = scalerCropRegion->left;
8511 }
8512 if (roi->rect.top < scalerCropRegion->top) {
8513 roi->rect.top = scalerCropRegion->top;
8514 }
8515 if (roi_x_max > crop_x_max) {
8516 roi_x_max = crop_x_max;
8517 }
8518 if (roi_y_max > crop_y_max) {
8519 roi_y_max = crop_y_max;
8520 }
8521 roi->rect.width = roi_x_max - roi->rect.left;
8522 roi->rect.height = roi_y_max - roi->rect.top;
8523 return true;
8524}
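/*
 * Illustrative example (values are hypothetical): with a scaler crop region of
 * (left=100, top=100, width=2000, height=1500) and an incoming roi of
 * (left=50, top=50, width=300, height=300), the roi partially overlaps the
 * crop region, so it is clamped to (left=100, top=100, width=250, height=250)
 * and the function returns true. An roi with weight == 0 (which per the spec
 * disables the roi) returns true immediately without clamping, while an roi
 * lying entirely outside the crop region returns false.
 */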
8525
8526/*===========================================================================
8527 * FUNCTION : convertLandmarks
8528 *
8529 * DESCRIPTION: helper method to extract the landmarks from face detection info
8530 *
8531 * PARAMETERS :
8532 * @landmark_data : input landmark data to be converted
8533 * @landmarks : int32_t destination array
8534 *
8535 *
8536 *==========================================================================*/
8537void QCamera3HardwareInterface::convertLandmarks(
8538 cam_face_landmarks_info_t landmark_data,
8539 int32_t *landmarks)
8540{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008541 if (landmark_data.is_left_eye_valid) {
8542 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8543 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8544 } else {
8545 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8546 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8547 }
8548
8549 if (landmark_data.is_right_eye_valid) {
8550 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8551 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8552 } else {
8553 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8554 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8555 }
8556
8557 if (landmark_data.is_mouth_valid) {
8558 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8559 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8560 } else {
8561 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8562 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8563 }
8564}
8565
8566/*===========================================================================
8567 * FUNCTION : setInvalidLandmarks
8568 *
8569 * DESCRIPTION: helper method to set invalid landmarks
8570 *
8571 * PARAMETERS :
8572 * @landmarks : int32_t destination array
8573 *
8574 *
8575 *==========================================================================*/
8576void QCamera3HardwareInterface::setInvalidLandmarks(
8577 int32_t *landmarks)
8578{
8579 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8580 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8581 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8582 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8583 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8584 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008585}
8586
8587#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008588
8589/*===========================================================================
8590 * FUNCTION : getCapabilities
8591 *
8592 * DESCRIPTION: query camera capability from back-end
8593 *
8594 * PARAMETERS :
8595 * @ops : mm-interface ops structure
8596 * @cam_handle : camera handle for which we need capability
8597 *
8598 * RETURN : ptr type of capability structure
8599 * capability for success
8600 * NULL for failure
8601 *==========================================================================*/
8602cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8603 uint32_t cam_handle)
8604{
8605 int rc = NO_ERROR;
8606 QCamera3HeapMemory *capabilityHeap = NULL;
8607 cam_capability_t *cap_ptr = NULL;
8608
8609 if (ops == NULL) {
8610 LOGE("Invalid arguments");
8611 return NULL;
8612 }
8613
8614 capabilityHeap = new QCamera3HeapMemory(1);
8615 if (capabilityHeap == NULL) {
8616 LOGE("creation of capabilityHeap failed");
8617 return NULL;
8618 }
8619
8620 /* Allocate memory for capability buffer */
8621 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8622 if(rc != OK) {
8623 LOGE("No memory for cappability");
8624 goto allocate_failed;
8625 }
8626
8627 /* Map memory for capability buffer */
8628 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8629
8630 rc = ops->map_buf(cam_handle,
8631 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8632 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8633 if(rc < 0) {
8634 LOGE("failed to map capability buffer");
8635 rc = FAILED_TRANSACTION;
8636 goto map_failed;
8637 }
8638
8639 /* Query Capability */
8640 rc = ops->query_capability(cam_handle);
8641 if(rc < 0) {
8642 LOGE("failed to query capability");
8643 rc = FAILED_TRANSACTION;
8644 goto query_failed;
8645 }
8646
8647 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8648 if (cap_ptr == NULL) {
8649 LOGE("out of memory");
8650 rc = NO_MEMORY;
8651 goto query_failed;
8652 }
8653
8654 memset(cap_ptr, 0, sizeof(cam_capability_t));
8655 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8656
8657 int index;
8658 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8659 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8660 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8661 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8662 }
8663
8664query_failed:
8665 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8666map_failed:
8667 capabilityHeap->deallocate();
8668allocate_failed:
8669 delete capabilityHeap;
8670
8671 if (rc != NO_ERROR) {
8672 return NULL;
8673 } else {
8674 return cap_ptr;
8675 }
8676}
8677
Thierry Strudel3d639192016-09-09 11:52:26 -07008678/*===========================================================================
8679 * FUNCTION : initCapabilities
8680 *
8681 * DESCRIPTION: initialize camera capabilities in static data struct
8682 *
8683 * PARAMETERS :
8684 * @cameraId : camera Id
8685 *
8686 * RETURN : int32_t type of status
8687 * NO_ERROR -- success
8688 * none-zero failure code
8689 *==========================================================================*/
8690int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8691{
8692 int rc = 0;
8693 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008694 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008695
8696 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8697 if (rc) {
8698 LOGE("camera_open failed. rc = %d", rc);
8699 goto open_failed;
8700 }
8701 if (!cameraHandle) {
8702 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8703 goto open_failed;
8704 }
8705
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008706 handle = get_main_camera_handle(cameraHandle->camera_handle);
8707 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8708 if (gCamCapability[cameraId] == NULL) {
8709 rc = FAILED_TRANSACTION;
8710 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008711 }
8712
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008713 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008714 if (is_dual_camera_by_idx(cameraId)) {
8715 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8716 gCamCapability[cameraId]->aux_cam_cap =
8717 getCapabilities(cameraHandle->ops, handle);
8718 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8719 rc = FAILED_TRANSACTION;
8720 free(gCamCapability[cameraId]);
8721 goto failed_op;
8722 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008723
8724 // Copy the main camera capability to main_cam_cap struct
8725 gCamCapability[cameraId]->main_cam_cap =
8726 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8727 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8728 LOGE("out of memory");
8729 rc = NO_MEMORY;
8730 goto failed_op;
8731 }
8732 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8733 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008734 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008735failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008736 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8737 cameraHandle = NULL;
8738open_failed:
8739 return rc;
8740}
8741
8742/*==========================================================================
8743 * FUNCTION : get3AVersion
8744 *
8745 * DESCRIPTION: get the Q3A S/W version
8746 *
8747 * PARAMETERS :
8748 * @sw_version: Reference of Q3A structure which will hold version info upon
8749 * return
8750 *
8751 * RETURN : None
8752 *
8753 *==========================================================================*/
8754void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8755{
8756 if(gCamCapability[mCameraId])
8757 sw_version = gCamCapability[mCameraId]->q3a_version;
8758 else
8759 LOGE("Capability structure NULL!");
8760}
8761
8762
8763/*===========================================================================
8764 * FUNCTION : initParameters
8765 *
8766 * DESCRIPTION: initialize camera parameters
8767 *
8768 * PARAMETERS :
8769 *
8770 * RETURN : int32_t type of status
8771 * NO_ERROR -- success
8772 * none-zero failure code
8773 *==========================================================================*/
8774int QCamera3HardwareInterface::initParameters()
8775{
8776 int rc = 0;
8777
8778 //Allocate Set Param Buffer
8779 mParamHeap = new QCamera3HeapMemory(1);
8780 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8781 if(rc != OK) {
8782 rc = NO_MEMORY;
8783 LOGE("Failed to allocate SETPARM Heap memory");
8784 delete mParamHeap;
8785 mParamHeap = NULL;
8786 return rc;
8787 }
8788
8789 //Map memory for parameters buffer
8790 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8791 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8792 mParamHeap->getFd(0),
8793 sizeof(metadata_buffer_t),
8794 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8795 if(rc < 0) {
8796 LOGE("failed to map SETPARM buffer");
8797 rc = FAILED_TRANSACTION;
8798 mParamHeap->deallocate();
8799 delete mParamHeap;
8800 mParamHeap = NULL;
8801 return rc;
8802 }
8803
8804 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8805
8806 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8807 return rc;
8808}
8809
8810/*===========================================================================
8811 * FUNCTION : deinitParameters
8812 *
8813 * DESCRIPTION: de-initialize camera parameters
8814 *
8815 * PARAMETERS :
8816 *
8817 * RETURN : NONE
8818 *==========================================================================*/
8819void QCamera3HardwareInterface::deinitParameters()
8820{
8821 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8822 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8823
8824 mParamHeap->deallocate();
8825 delete mParamHeap;
8826 mParamHeap = NULL;
8827
8828 mParameters = NULL;
8829
8830 free(mPrevParameters);
8831 mPrevParameters = NULL;
8832}
8833
8834/*===========================================================================
8835 * FUNCTION : calcMaxJpegSize
8836 *
8837 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8838 *
8839 * PARAMETERS :
8840 *   @camera_id : camera Id
8841 * RETURN : max_jpeg_size
8842 *==========================================================================*/
8843size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8844{
8845 size_t max_jpeg_size = 0;
8846 size_t temp_width, temp_height;
8847 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8848 MAX_SIZES_CNT);
8849 for (size_t i = 0; i < count; i++) {
8850 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8851 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8852 if (temp_width * temp_height > max_jpeg_size ) {
8853 max_jpeg_size = temp_width * temp_height;
8854 }
8855 }
8856 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8857 return max_jpeg_size;
8858}
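// Worked example (sensor size is hypothetical): for a largest picture size of
// 4000x3000, max_jpeg_size = 4000 * 3000 * 3/2 + sizeof(camera3_jpeg_blob_t),
// i.e. 18,000,000 bytes plus the blob header -- sized for an uncompressed
// YUV420 frame plus the JPEG blob trailer as the worst case.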
8859
8860/*===========================================================================
8861 * FUNCTION : getMaxRawSize
8862 *
8863 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8864 *
8865 * PARAMETERS :
8866 *   @camera_id : camera Id
8867 * RETURN : Largest supported Raw Dimension
8868 *==========================================================================*/
8869cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8870{
8871 int max_width = 0;
8872 cam_dimension_t maxRawSize;
8873
8874 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8875 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8876 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8877 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8878 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8879 }
8880 }
8881 return maxRawSize;
8882}
8883
8884
8885/*===========================================================================
8886 * FUNCTION : calcMaxJpegDim
8887 *
8888 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8889 *
8890 * PARAMETERS :
8891 *
8892 * RETURN : max_jpeg_dim
8893 *==========================================================================*/
8894cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8895{
8896 cam_dimension_t max_jpeg_dim;
8897 cam_dimension_t curr_jpeg_dim;
8898 max_jpeg_dim.width = 0;
8899 max_jpeg_dim.height = 0;
8900 curr_jpeg_dim.width = 0;
8901 curr_jpeg_dim.height = 0;
8902 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8903 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8904 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8905 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8906 max_jpeg_dim.width * max_jpeg_dim.height ) {
8907 max_jpeg_dim.width = curr_jpeg_dim.width;
8908 max_jpeg_dim.height = curr_jpeg_dim.height;
8909 }
8910 }
8911 return max_jpeg_dim;
8912}
8913
8914/*===========================================================================
8915 * FUNCTION : addStreamConfig
8916 *
8917 * DESCRIPTION: adds the stream configuration to the array
8918 *
8919 * PARAMETERS :
8920 * @available_stream_configs : pointer to stream configuration array
8921 * @scalar_format : scalar format
8922 * @dim : configuration dimension
8923 * @config_type : input or output configuration type
8924 *
8925 * RETURN : NONE
8926 *==========================================================================*/
8927void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8928 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8929{
8930 available_stream_configs.add(scalar_format);
8931 available_stream_configs.add(dim.width);
8932 available_stream_configs.add(dim.height);
8933 available_stream_configs.add(config_type);
8934}
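// Usage sketch (hypothetical call): each entry in the static stream
// configuration tag is a flat {format, width, height, direction} 4-tuple, so
// a call along the lines of
//     addStreamConfig(available_stream_configs, HAL_PIXEL_FORMAT_BLOB,
//             gCamCapability[cameraId]->picture_sizes_tbl[i],
//             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
// appends one BLOB output configuration to the vector backing
// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.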
8935
8936/*===========================================================================
8937 * FUNCTION : supportBurstCapture
8938 *
8939 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8940 *
8941 * PARAMETERS :
8942 * @cameraId : camera Id
8943 *
8944 * RETURN : true if camera supports BURST_CAPTURE
8945 * false otherwise
8946 *==========================================================================*/
8947bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8948{
8949 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8950 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8951 const int32_t highResWidth = 3264;
8952 const int32_t highResHeight = 2448;
8953
8954 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8955 // Maximum resolution images cannot be captured at >= 10fps
8956 // -> not supporting BURST_CAPTURE
8957 return false;
8958 }
8959
8960 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8961 // Maximum resolution images can be captured at >= 20fps
8962 // --> supporting BURST_CAPTURE
8963 return true;
8964 }
8965
8966 // Find the smallest highRes resolution, or largest resolution if there is none
8967 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8968 MAX_SIZES_CNT);
8969 size_t highRes = 0;
8970 while ((highRes + 1 < totalCnt) &&
8971 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8972 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8973 highResWidth * highResHeight)) {
8974 highRes++;
8975 }
8976 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8977 return true;
8978 } else {
8979 return false;
8980 }
8981}
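// Worked example (durations are hypothetical): if the full-resolution minimum
// frame duration is 80 ms (12.5 fps), neither early return applies, so the
// table is scanned for the smallest size that is still at least 3264x2448
// (falling back to the largest size); BURST_CAPTURE is advertised only if that
// size can be captured in at most 50 ms (i.e. at 20 fps or faster).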
8982
8983/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008984 * FUNCTION : getPDStatIndex
8985 *
8986 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8987 *
8988 * PARAMETERS :
8989 * @caps : camera capabilities
8990 *
8991 * RETURN : int32_t type
8992 * non-negative - on success
8993 * -1 - on failure
8994 *==========================================================================*/
8995int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8996 if (nullptr == caps) {
8997 return -1;
8998 }
8999
9000 uint32_t metaRawCount = caps->meta_raw_channel_count;
9001 int32_t ret = -1;
9002 for (size_t i = 0; i < metaRawCount; i++) {
9003 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9004 ret = i;
9005 break;
9006 }
9007 }
9008
9009 return ret;
9010}
9011
9012/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009013 * FUNCTION : initStaticMetadata
9014 *
9015 * DESCRIPTION: initialize the static metadata
9016 *
9017 * PARAMETERS :
9018 * @cameraId : camera Id
9019 *
9020 * RETURN : int32_t type of status
9021 * 0 -- success
9022 * non-zero failure code
9023 *==========================================================================*/
9024int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9025{
9026 int rc = 0;
9027 CameraMetadata staticInfo;
9028 size_t count = 0;
9029 bool limitedDevice = false;
9030 char prop[PROPERTY_VALUE_MAX];
9031 bool supportBurst = false;
9032
9033 supportBurst = supportBurstCapture(cameraId);
9034
9035 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9036 * guaranteed, or if min fps of max resolution is less than 20 fps, it is
9037 * advertised as a limited device*/
9038 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9039 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9040 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9041 !supportBurst;
9042
9043 uint8_t supportedHwLvl = limitedDevice ?
9044 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009045#ifndef USE_HAL_3_3
9046 // LEVEL_3 - This device will support level 3.
9047 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9048#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009049 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009050#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009051
9052 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9053 &supportedHwLvl, 1);
9054
9055 bool facingBack = false;
9056 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9057 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9058 facingBack = true;
9059 }
9060 /*HAL 3 only*/
9061 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9062 &gCamCapability[cameraId]->min_focus_distance, 1);
9063
9064 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9065 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9066
9067 /*should be using focal lengths but sensor doesn't provide that info now*/
9068 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9069 &gCamCapability[cameraId]->focal_length,
9070 1);
9071
9072 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9073 gCamCapability[cameraId]->apertures,
9074 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9075
9076 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9077 gCamCapability[cameraId]->filter_densities,
9078 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9079
9080
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009081 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9082 size_t mode_count =
9083 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9084 for (size_t i = 0; i < mode_count; i++) {
9085 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9086 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009087 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009088 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009089
9090 int32_t lens_shading_map_size[] = {
9091 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9092 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9093 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9094 lens_shading_map_size,
9095 sizeof(lens_shading_map_size)/sizeof(int32_t));
9096
9097 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9098 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9099
9100 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9101 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9102
9103 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9104 &gCamCapability[cameraId]->max_frame_duration, 1);
9105
9106 camera_metadata_rational baseGainFactor = {
9107 gCamCapability[cameraId]->base_gain_factor.numerator,
9108 gCamCapability[cameraId]->base_gain_factor.denominator};
9109 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9110 &baseGainFactor, 1);
9111
9112 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9113 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9114
9115 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9116 gCamCapability[cameraId]->pixel_array_size.height};
9117 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9118 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9119
9120 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9121 gCamCapability[cameraId]->active_array_size.top,
9122 gCamCapability[cameraId]->active_array_size.width,
9123 gCamCapability[cameraId]->active_array_size.height};
9124 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9125 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9126
9127 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9128 &gCamCapability[cameraId]->white_level, 1);
9129
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009130 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9131 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9132 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009133 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009134 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009135
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009136#ifndef USE_HAL_3_3
9137 bool hasBlackRegions = false;
9138 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9139 LOGW("black_region_count: %d is bounded to %d",
9140 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9141 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9142 }
9143 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9144 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9145 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9146 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9147 }
9148 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9149 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9150 hasBlackRegions = true;
9151 }
9152#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009153 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9154 &gCamCapability[cameraId]->flash_charge_duration, 1);
9155
9156 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9157 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9158
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009159 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9160 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9161 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009162 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9163 &timestampSource, 1);
9164
Thierry Strudel54dc9782017-02-15 12:12:10 -08009165 //update histogram vendor data
9166 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009167 &gCamCapability[cameraId]->histogram_size, 1);
9168
Thierry Strudel54dc9782017-02-15 12:12:10 -08009169 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009170 &gCamCapability[cameraId]->max_histogram_count, 1);
9171
Shuzhen Wang14415f52016-11-16 18:26:18 -08009172 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9173 //so that the app can request fewer bins than the maximum supported.
9174 std::vector<int32_t> histBins;
9175 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9176 histBins.push_back(maxHistBins);
9177 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9178 (maxHistBins & 0x1) == 0) {
9179 histBins.push_back(maxHistBins >> 1);
9180 maxHistBins >>= 1;
9181 }
9182 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9183 histBins.data(), histBins.size());
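// For illustration: assuming max_histogram_count were 256 and MIN_CAM_HISTOGRAM_STATS_SIZE
// were 32 (hypothetical values), the loop above would advertise {256, 128, 64, 32};
// halving stops once the next value would fall below the minimum or the current count is odd.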
9184
Thierry Strudel3d639192016-09-09 11:52:26 -07009185 int32_t sharpness_map_size[] = {
9186 gCamCapability[cameraId]->sharpness_map_size.width,
9187 gCamCapability[cameraId]->sharpness_map_size.height};
9188
9189 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9190 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9191
9192 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9193 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9194
Emilian Peev0f3c3162017-03-15 12:57:46 +00009195 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9196 if (0 <= indexPD) {
9197 // Advertise PD stats data as part of the Depth capabilities
9198 int32_t depthWidth =
9199 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9200 int32_t depthHeight =
9201 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9202 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9203 assert(0 < depthSamplesCount);
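// For illustration with hypothetical numbers: a 640x480 PD stats plane would
// advertise (640 * 480 * 2) / 16 = 38400 depth samples.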
9204 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9205 &depthSamplesCount, 1);
9206
9207 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9208 depthHeight,
9209 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9210 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9211 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9212 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9213 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9214
9215 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9216 depthHeight, 33333333,
9217 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9218 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9219 depthMinDuration,
9220 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
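// Each entry above is (format, width, height, duration in ns); 33333333 ns
// corresponds to roughly 30 fps.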
9221
9222 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9223 depthHeight, 0,
9224 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9225 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9226 depthStallDuration,
9227 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9228
9229 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9230 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9231 }
9232
Thierry Strudel3d639192016-09-09 11:52:26 -07009233 int32_t scalar_formats[] = {
9234 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9235 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9236 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9237 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9238 HAL_PIXEL_FORMAT_RAW10,
9239 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009240 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9241 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9242 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009243
9244 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9245 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9246 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9247 count, MAX_SIZES_CNT, available_processed_sizes);
9248 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9249 available_processed_sizes, count * 2);
9250
9251 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9252 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9253 makeTable(gCamCapability[cameraId]->raw_dim,
9254 count, MAX_SIZES_CNT, available_raw_sizes);
9255 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9256 available_raw_sizes, count * 2);
9257
9258 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9259 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9260 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9261 count, MAX_SIZES_CNT, available_fps_ranges);
9262 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9263 available_fps_ranges, count * 2);
9264
9265 camera_metadata_rational exposureCompensationStep = {
9266 gCamCapability[cameraId]->exp_compensation_step.numerator,
9267 gCamCapability[cameraId]->exp_compensation_step.denominator};
9268 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9269 &exposureCompensationStep, 1);
9270
9271 Vector<uint8_t> availableVstabModes;
9272 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9273 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009274 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009275 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009276 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009277 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009278 count = IS_TYPE_MAX;
9279 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9280 for (size_t i = 0; i < count; i++) {
9281 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9282 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9283 eisSupported = true;
9284 break;
9285 }
9286 }
9287 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009288 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9289 }
9290 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9291 availableVstabModes.array(), availableVstabModes.size());
9292
9293 /*HAL 1 and HAL 3 common*/
9294 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9295 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9296 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009297 // Cap the max zoom to the max preferred value
9298 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009299 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9300 &maxZoom, 1);
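// For illustration: with 100 representing 1x zoom, a table whose last entry is 800
// yields maxZoom = MIN(8, MAX_PREFERRED_ZOOM_RATIO). Note that maxZoomStep / minZoomStep
// is unsigned integer division, so a hypothetical last entry of 794 would yield 7.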
9301
9302 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9303 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9304
9305 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9306 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9307 max3aRegions[2] = 0; /* AF not supported */
9308 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9309 max3aRegions, 3);
9310
9311 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9312 memset(prop, 0, sizeof(prop));
9313 property_get("persist.camera.facedetect", prop, "1");
9314 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9315 LOGD("Support face detection mode: %d",
9316 supportedFaceDetectMode);
9317
9318 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009319 /* supported mode should be OFF if the max number of faces is 0 */
9320 if (maxFaces <= 0) {
9321 supportedFaceDetectMode = 0;
9322 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009323 Vector<uint8_t> availableFaceDetectModes;
9324 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9325 if (supportedFaceDetectMode == 1) {
9326 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9327 } else if (supportedFaceDetectMode == 2) {
9328 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9329 } else if (supportedFaceDetectMode == 3) {
9330 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9331 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9332 } else {
9333 maxFaces = 0;
9334 }
9335 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9336 availableFaceDetectModes.array(),
9337 availableFaceDetectModes.size());
9338 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9339 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009340 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9341 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9342 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009343
9344 int32_t exposureCompensationRange[] = {
9345 gCamCapability[cameraId]->exposure_compensation_min,
9346 gCamCapability[cameraId]->exposure_compensation_max};
9347 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9348 exposureCompensationRange,
9349 sizeof(exposureCompensationRange)/sizeof(int32_t));
9350
9351 uint8_t lensFacing = (facingBack) ?
9352 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9353 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9354
9355 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9356 available_thumbnail_sizes,
9357 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9358
9359 /* all sizes will be combined into this tag */
9360 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9361 /*android.scaler.availableStreamConfigurations*/
9362 Vector<int32_t> available_stream_configs;
9363 cam_dimension_t active_array_dim;
9364 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9365 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009366
9367 /* Advertise the list of supported input dimensions based on the property below.
9368 By default all sizes up to 5MP will be advertised.
9369 Note that the setprop resolution format should be WxH.
9370 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9371 To list all supported sizes, the property needs to be set to "0x0" */
9372 cam_dimension_t minInputSize = {2592,1944}; //5MP
9373 memset(prop, 0, sizeof(prop));
9374 property_get("persist.camera.input.minsize", prop, "2592x1944");
9375 if (strlen(prop) > 0) {
9376 char *saveptr = NULL;
9377 char *token = strtok_r(prop, "x", &saveptr);
9378 if (token != NULL) {
9379 minInputSize.width = atoi(token);
9380 }
9381 token = strtok_r(NULL, "x", &saveptr);
9382 if (token != NULL) {
9383 minInputSize.height = atoi(token);
9384 }
9385 }
9386
Thierry Strudel3d639192016-09-09 11:52:26 -07009387 /* Add input/output stream configurations for each scalar formats*/
9388 for (size_t j = 0; j < scalar_formats_count; j++) {
9389 switch (scalar_formats[j]) {
9390 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9391 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9392 case HAL_PIXEL_FORMAT_RAW10:
9393 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9394 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9395 addStreamConfig(available_stream_configs, scalar_formats[j],
9396 gCamCapability[cameraId]->raw_dim[i],
9397 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9398 }
9399 break;
9400 case HAL_PIXEL_FORMAT_BLOB:
9401 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9402 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9403 addStreamConfig(available_stream_configs, scalar_formats[j],
9404 gCamCapability[cameraId]->picture_sizes_tbl[i],
9405 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9406 }
9407 break;
9408 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9409 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9410 default:
9411 cam_dimension_t largest_picture_size;
9412 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9413 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9414 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9415 addStreamConfig(available_stream_configs, scalar_formats[j],
9416 gCamCapability[cameraId]->picture_sizes_tbl[i],
9417 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009418 /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009419 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9420 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009421 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9422 >= minInputSize.width) || (gCamCapability[cameraId]->
9423 picture_sizes_tbl[i].height >= minInputSize.height)) {
9424 addStreamConfig(available_stream_configs, scalar_formats[j],
9425 gCamCapability[cameraId]->picture_sizes_tbl[i],
9426 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9427 }
9428 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009429 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009430
Thierry Strudel3d639192016-09-09 11:52:26 -07009431 break;
9432 }
9433 }
9434
9435 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9436 available_stream_configs.array(), available_stream_configs.size());
9437 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9438 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9439
9440 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9441 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9442
9443 /* android.scaler.availableMinFrameDurations */
9444 Vector<int64_t> available_min_durations;
9445 for (size_t j = 0; j < scalar_formats_count; j++) {
9446 switch (scalar_formats[j]) {
9447 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9448 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9449 case HAL_PIXEL_FORMAT_RAW10:
9450 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9451 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9452 available_min_durations.add(scalar_formats[j]);
9453 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9454 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9455 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9456 }
9457 break;
9458 default:
9459 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9460 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9461 available_min_durations.add(scalar_formats[j]);
9462 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9463 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9464 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9465 }
9466 break;
9467 }
9468 }
9469 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9470 available_min_durations.array(), available_min_durations.size());
9471
9472 Vector<int32_t> available_hfr_configs;
9473 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9474 int32_t fps = 0;
9475 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9476 case CAM_HFR_MODE_60FPS:
9477 fps = 60;
9478 break;
9479 case CAM_HFR_MODE_90FPS:
9480 fps = 90;
9481 break;
9482 case CAM_HFR_MODE_120FPS:
9483 fps = 120;
9484 break;
9485 case CAM_HFR_MODE_150FPS:
9486 fps = 150;
9487 break;
9488 case CAM_HFR_MODE_180FPS:
9489 fps = 180;
9490 break;
9491 case CAM_HFR_MODE_210FPS:
9492 fps = 210;
9493 break;
9494 case CAM_HFR_MODE_240FPS:
9495 fps = 240;
9496 break;
9497 case CAM_HFR_MODE_480FPS:
9498 fps = 480;
9499 break;
9500 case CAM_HFR_MODE_OFF:
9501 case CAM_HFR_MODE_MAX:
9502 default:
9503 break;
9504 }
9505
9506 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9507 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9508 /* For each HFR frame rate, need to advertise one variable fps range
9509 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9510 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9511 * set by the app. When video recording is started, [120, 120] is
9512 * set. This way sensor configuration does not change when recording
9513 * is started */
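/* Illustrative example, assuming PREVIEW_FPS_FOR_HFR is 30 as the [30, 120]
 * example above implies: a 120 FPS mode at a hypothetical 1920x1080 dimension
 * would add (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4), where
 * 4 = 120 / PREVIEW_FPS_FOR_HFR is the maximum batch size. */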
9514
9515 /* (width, height, fps_min, fps_max, batch_size_max) */
9516 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9517 j < MAX_SIZES_CNT; j++) {
9518 available_hfr_configs.add(
9519 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9520 available_hfr_configs.add(
9521 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9522 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9523 available_hfr_configs.add(fps);
9524 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9525
9526 /* (width, height, fps_min, fps_max, batch_size_max) */
9527 available_hfr_configs.add(
9528 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9529 available_hfr_configs.add(
9530 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9531 available_hfr_configs.add(fps);
9532 available_hfr_configs.add(fps);
9533 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9534 }
9535 }
9536 }
9537 //Advertise HFR capability only if the property is set
9538 memset(prop, 0, sizeof(prop));
9539 property_get("persist.camera.hal3hfr.enable", prop, "1");
9540 uint8_t hfrEnable = (uint8_t)atoi(prop);
9541
9542 if(hfrEnable && available_hfr_configs.array()) {
9543 staticInfo.update(
9544 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9545 available_hfr_configs.array(), available_hfr_configs.size());
9546 }
9547
9548 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9549 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9550 &max_jpeg_size, 1);
9551
9552 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9553 size_t size = 0;
9554 count = CAM_EFFECT_MODE_MAX;
9555 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9556 for (size_t i = 0; i < count; i++) {
9557 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9558 gCamCapability[cameraId]->supported_effects[i]);
9559 if (NAME_NOT_FOUND != val) {
9560 avail_effects[size] = (uint8_t)val;
9561 size++;
9562 }
9563 }
9564 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9565 avail_effects,
9566 size);
9567
9568 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9569 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9570 size_t supported_scene_modes_cnt = 0;
9571 count = CAM_SCENE_MODE_MAX;
9572 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9573 for (size_t i = 0; i < count; i++) {
9574 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9575 CAM_SCENE_MODE_OFF) {
9576 int val = lookupFwkName(SCENE_MODES_MAP,
9577 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9578 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009579
Thierry Strudel3d639192016-09-09 11:52:26 -07009580 if (NAME_NOT_FOUND != val) {
9581 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9582 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9583 supported_scene_modes_cnt++;
9584 }
9585 }
9586 }
9587 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9588 avail_scene_modes,
9589 supported_scene_modes_cnt);
9590
9591 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9592 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9593 supported_scene_modes_cnt,
9594 CAM_SCENE_MODE_MAX,
9595 scene_mode_overrides,
9596 supported_indexes,
9597 cameraId);
9598
9599 if (supported_scene_modes_cnt == 0) {
9600 supported_scene_modes_cnt = 1;
9601 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9602 }
9603
9604 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9605 scene_mode_overrides, supported_scene_modes_cnt * 3);
9606
9607 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9608 ANDROID_CONTROL_MODE_AUTO,
9609 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9610 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9611 available_control_modes,
9612 3);
9613
9614 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9615 size = 0;
9616 count = CAM_ANTIBANDING_MODE_MAX;
9617 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9618 for (size_t i = 0; i < count; i++) {
9619 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9620 gCamCapability[cameraId]->supported_antibandings[i]);
9621 if (NAME_NOT_FOUND != val) {
9622 avail_antibanding_modes[size] = (uint8_t)val;
9623 size++;
9624 }
9625
9626 }
9627 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9628 avail_antibanding_modes,
9629 size);
9630
9631 uint8_t avail_abberation_modes[] = {
9632 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9633 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9634 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9635 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9636 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9637 if (0 == count) {
9638 // If no aberration correction modes are available for a device, advertise only the OFF mode
9639 size = 1;
9640 } else {
9641 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported
9642 // So, advertise all 3 modes if at least one mode is supported, as per the
9643 // new M requirement
9644 size = 3;
9645 }
9646 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9647 avail_abberation_modes,
9648 size);
9649
9650 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9651 size = 0;
9652 count = CAM_FOCUS_MODE_MAX;
9653 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9654 for (size_t i = 0; i < count; i++) {
9655 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9656 gCamCapability[cameraId]->supported_focus_modes[i]);
9657 if (NAME_NOT_FOUND != val) {
9658 avail_af_modes[size] = (uint8_t)val;
9659 size++;
9660 }
9661 }
9662 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9663 avail_af_modes,
9664 size);
9665
9666 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9667 size = 0;
9668 count = CAM_WB_MODE_MAX;
9669 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9670 for (size_t i = 0; i < count; i++) {
9671 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9672 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9673 gCamCapability[cameraId]->supported_white_balances[i]);
9674 if (NAME_NOT_FOUND != val) {
9675 avail_awb_modes[size] = (uint8_t)val;
9676 size++;
9677 }
9678 }
9679 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9680 avail_awb_modes,
9681 size);
9682
9683 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9684 count = CAM_FLASH_FIRING_LEVEL_MAX;
9685 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9686 count);
9687 for (size_t i = 0; i < count; i++) {
9688 available_flash_levels[i] =
9689 gCamCapability[cameraId]->supported_firing_levels[i];
9690 }
9691 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9692 available_flash_levels, count);
9693
9694 uint8_t flashAvailable;
9695 if (gCamCapability[cameraId]->flash_available)
9696 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9697 else
9698 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9699 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9700 &flashAvailable, 1);
9701
9702 Vector<uint8_t> avail_ae_modes;
9703 count = CAM_AE_MODE_MAX;
9704 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9705 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009706 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9707 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9708 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9709 }
9710 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009711 }
9712 if (flashAvailable) {
9713 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9714 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9715 }
9716 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9717 avail_ae_modes.array(),
9718 avail_ae_modes.size());
9719
9720 int32_t sensitivity_range[2];
9721 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9722 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9723 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9724 sensitivity_range,
9725 sizeof(sensitivity_range) / sizeof(int32_t));
9726
9727 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9728 &gCamCapability[cameraId]->max_analog_sensitivity,
9729 1);
9730
9731 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9732 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9733 &sensor_orientation,
9734 1);
9735
9736 int32_t max_output_streams[] = {
9737 MAX_STALLING_STREAMS,
9738 MAX_PROCESSED_STREAMS,
9739 MAX_RAW_STREAMS};
9740 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9741 max_output_streams,
9742 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9743
9744 uint8_t avail_leds = 0;
9745 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9746 &avail_leds, 0);
9747
9748 uint8_t focus_dist_calibrated;
9749 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9750 gCamCapability[cameraId]->focus_dist_calibrated);
9751 if (NAME_NOT_FOUND != val) {
9752 focus_dist_calibrated = (uint8_t)val;
9753 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9754 &focus_dist_calibrated, 1);
9755 }
9756
9757 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9758 size = 0;
9759 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9760 MAX_TEST_PATTERN_CNT);
9761 for (size_t i = 0; i < count; i++) {
9762 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9763 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9764 if (NAME_NOT_FOUND != testpatternMode) {
9765 avail_testpattern_modes[size] = testpatternMode;
9766 size++;
9767 }
9768 }
9769 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9770 avail_testpattern_modes,
9771 size);
9772
9773 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9774 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9775 &max_pipeline_depth,
9776 1);
9777
9778 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9779 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9780 &partial_result_count,
9781 1);
9782
9783 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9784 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9785
9786 Vector<uint8_t> available_capabilities;
9787 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9788 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9789 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9790 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9791 if (supportBurst) {
9792 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9793 }
9794 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9795 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9796 if (hfrEnable && available_hfr_configs.array()) {
9797 available_capabilities.add(
9798 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9799 }
9800
9801 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9802 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9803 }
9804 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9805 available_capabilities.array(),
9806 available_capabilities.size());
9807
9808 //aeLockAvailable is set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9809 //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9810 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9811 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9812
9813 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9814 &aeLockAvailable, 1);
9815
9816 //awbLockAvailable is set to true if capabilities include MANUAL_POST_PROCESSING or
9817 //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9818 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9819 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9820
9821 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9822 &awbLockAvailable, 1);
9823
9824 int32_t max_input_streams = 1;
9825 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9826 &max_input_streams,
9827 1);
9828
9829 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9830 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9831 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9832 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9833 HAL_PIXEL_FORMAT_YCbCr_420_888};
9834 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9835 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
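// Decoded, the map above advertises IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888}
// and YCbCr_420_888 -> {BLOB, YCbCr_420_888}.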
9836
9837 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9838 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9839 &max_latency,
9840 1);
9841
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009842#ifndef USE_HAL_3_3
9843 int32_t isp_sensitivity_range[2];
9844 isp_sensitivity_range[0] =
9845 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9846 isp_sensitivity_range[1] =
9847 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9848 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9849 isp_sensitivity_range,
9850 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9851#endif
9852
Thierry Strudel3d639192016-09-09 11:52:26 -07009853 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9854 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9855 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9856 available_hot_pixel_modes,
9857 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9858
9859 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9860 ANDROID_SHADING_MODE_FAST,
9861 ANDROID_SHADING_MODE_HIGH_QUALITY};
9862 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9863 available_shading_modes,
9864 3);
9865
9866 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9867 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9868 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9869 available_lens_shading_map_modes,
9870 2);
9871
9872 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9873 ANDROID_EDGE_MODE_FAST,
9874 ANDROID_EDGE_MODE_HIGH_QUALITY,
9875 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9876 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9877 available_edge_modes,
9878 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9879
9880 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9881 ANDROID_NOISE_REDUCTION_MODE_FAST,
9882 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9883 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9884 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9885 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9886 available_noise_red_modes,
9887 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9888
9889 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9890 ANDROID_TONEMAP_MODE_FAST,
9891 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9892 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9893 available_tonemap_modes,
9894 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9895
9896 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9897 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9898 available_hot_pixel_map_modes,
9899 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9900
9901 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9902 gCamCapability[cameraId]->reference_illuminant1);
9903 if (NAME_NOT_FOUND != val) {
9904 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9905 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9906 }
9907
9908 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9909 gCamCapability[cameraId]->reference_illuminant2);
9910 if (NAME_NOT_FOUND != val) {
9911 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9912 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9913 }
9914
9915 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9916 (void *)gCamCapability[cameraId]->forward_matrix1,
9917 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9918
9919 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9920 (void *)gCamCapability[cameraId]->forward_matrix2,
9921 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9922
9923 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9924 (void *)gCamCapability[cameraId]->color_transform1,
9925 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9926
9927 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9928 (void *)gCamCapability[cameraId]->color_transform2,
9929 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9930
9931 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9932 (void *)gCamCapability[cameraId]->calibration_transform1,
9933 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9934
9935 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9936 (void *)gCamCapability[cameraId]->calibration_transform2,
9937 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9938
9939 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9940 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9941 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9942 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9943 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9944 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9945 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9946 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9947 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9948 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9949 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9950 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9951 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9952 ANDROID_JPEG_GPS_COORDINATES,
9953 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9954 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9955 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9956 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9957 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9958 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9959 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9960 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9961 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9962 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009963#ifndef USE_HAL_3_3
9964 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9965#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009966 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009967 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009968 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9969 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009970 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009971 /* DevCamDebug metadata request_keys_basic */
9972 DEVCAMDEBUG_META_ENABLE,
9973 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009974 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009975 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07009976 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -07009977 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -08009978 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009979
9980 size_t request_keys_cnt =
9981 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9982 Vector<int32_t> available_request_keys;
9983 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9984 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9985 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9986 }
9987
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009988 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07009989 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
9990 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
9991 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009992 }
9993
Thierry Strudel3d639192016-09-09 11:52:26 -07009994 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9995 available_request_keys.array(), available_request_keys.size());
9996
9997 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9998 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9999 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10000 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10001 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10002 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10003 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10004 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10005 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10006 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10007 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10008 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10009 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10010 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10011 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10012 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10013 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010014 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010015 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10016 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10017 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010018 ANDROID_STATISTICS_FACE_SCORES,
10019#ifndef USE_HAL_3_3
10020 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10021#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010022 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010023 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010024 // DevCamDebug metadata result_keys_basic
10025 DEVCAMDEBUG_META_ENABLE,
10026 // DevCamDebug metadata result_keys AF
10027 DEVCAMDEBUG_AF_LENS_POSITION,
10028 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10029 DEVCAMDEBUG_AF_TOF_DISTANCE,
10030 DEVCAMDEBUG_AF_LUMA,
10031 DEVCAMDEBUG_AF_HAF_STATE,
10032 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10033 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10034 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10035 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10036 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10037 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10038 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10039 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10040 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10041 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10042 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10043 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10044 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10045 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10046 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10047 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10048 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10049 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10050 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10051 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10052 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10053 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10054 // DevCamDebug metadata result_keys AEC
10055 DEVCAMDEBUG_AEC_TARGET_LUMA,
10056 DEVCAMDEBUG_AEC_COMP_LUMA,
10057 DEVCAMDEBUG_AEC_AVG_LUMA,
10058 DEVCAMDEBUG_AEC_CUR_LUMA,
10059 DEVCAMDEBUG_AEC_LINECOUNT,
10060 DEVCAMDEBUG_AEC_REAL_GAIN,
10061 DEVCAMDEBUG_AEC_EXP_INDEX,
10062 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010063 // DevCamDebug metadata result_keys zzHDR
10064 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10065 DEVCAMDEBUG_AEC_L_LINECOUNT,
10066 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10067 DEVCAMDEBUG_AEC_S_LINECOUNT,
10068 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10069 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10070 // DevCamDebug metadata result_keys ADRC
10071 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10072 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10073 DEVCAMDEBUG_AEC_GTM_RATIO,
10074 DEVCAMDEBUG_AEC_LTM_RATIO,
10075 DEVCAMDEBUG_AEC_LA_RATIO,
10076 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010077 // DevCamDebug metadata result_keys AWB
10078 DEVCAMDEBUG_AWB_R_GAIN,
10079 DEVCAMDEBUG_AWB_G_GAIN,
10080 DEVCAMDEBUG_AWB_B_GAIN,
10081 DEVCAMDEBUG_AWB_CCT,
10082 DEVCAMDEBUG_AWB_DECISION,
10083 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010084 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10085 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10086 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010087 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010088 };
10089
Thierry Strudel3d639192016-09-09 11:52:26 -070010090 size_t result_keys_cnt =
10091 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10092
10093 Vector<int32_t> available_result_keys;
10094 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10095 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10096 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10097 }
10098 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10099 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10100 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10101 }
10102 if (supportedFaceDetectMode == 1) {
10103 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10104 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10105 } else if ((supportedFaceDetectMode == 2) ||
10106 (supportedFaceDetectMode == 3)) {
10107 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10108 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10109 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010110#ifndef USE_HAL_3_3
10111 if (hasBlackRegions) {
10112 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10113 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10114 }
10115#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010116
10117 if (gExposeEnableZslKey) {
10118 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10119 }
10120
Thierry Strudel3d639192016-09-09 11:52:26 -070010121 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10122 available_result_keys.array(), available_result_keys.size());
10123
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010124 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010125 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10126 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10127 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10128 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10129 ANDROID_SCALER_CROPPING_TYPE,
10130 ANDROID_SYNC_MAX_LATENCY,
10131 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10132 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10133 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10134 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10135 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10136 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10137 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10138 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10139 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10140 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10141 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10142 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10143 ANDROID_LENS_FACING,
10144 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10145 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10146 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10147 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10148 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10149 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10150 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10151 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10152 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10153 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10154 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10155 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10156 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10157 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10158 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10159 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10160 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10161 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10162 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10163 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010164 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010165 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10166 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10167 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10168 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10169 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10170 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10171 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10172 ANDROID_CONTROL_AVAILABLE_MODES,
10173 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10174 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10175 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10176 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010177 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10178#ifndef USE_HAL_3_3
10179 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10180 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10181#endif
10182 };
10183
10184 Vector<int32_t> available_characteristics_keys;
10185 available_characteristics_keys.appendArray(characteristics_keys_basic,
10186 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10187#ifndef USE_HAL_3_3
10188 if (hasBlackRegions) {
10189 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10190 }
10191#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010192
10193 if (0 <= indexPD) {
10194 int32_t depthKeys[] = {
10195 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10196 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10197 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10198 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10199 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10200 };
10201 available_characteristics_keys.appendArray(depthKeys,
10202 sizeof(depthKeys) / sizeof(depthKeys[0]));
10203 }
10204
Thierry Strudel3d639192016-09-09 11:52:26 -070010205 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010206 available_characteristics_keys.array(),
10207 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010208
10209 /* available stall durations depend on the HW + SW and will differ across devices */
10210 /* stall durations for RAW still have to be added after implementation */
10211 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10212 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10213
10214 Vector<int64_t> available_stall_durations;
10215 for (uint32_t j = 0; j < stall_formats_count; j++) {
10216 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10217 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10218 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10219 available_stall_durations.add(stall_formats[j]);
10220 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10221 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10222 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10223 }
10224 } else {
10225 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10226 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10227 available_stall_durations.add(stall_formats[j]);
10228 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10229 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10230 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10231 }
10232 }
10233 }
10234 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10235 available_stall_durations.array(),
10236 available_stall_durations.size());
10237
10238 //QCAMERA3_OPAQUE_RAW
10239 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10240 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10241 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10242 case LEGACY_RAW:
10243 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10244 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10245 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10246 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10247 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10248 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10249 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10250 break;
10251 case MIPI_RAW:
10252 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10253 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10254 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10255 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10256 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10257 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10258 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10259 break;
10260 default:
10261 LOGE("unknown opaque_raw_format %d",
10262 gCamCapability[cameraId]->opaque_raw_fmt);
10263 break;
10264 }
10265 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
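// For illustration: a 10-bit sensor (white_level == MAX_VALUE_10BIT) reporting
// MIPI_RAW would select CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG above and advertise
// QCAMERA3_OPAQUE_RAW_FORMAT_MIPI.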
10266
10267 Vector<int32_t> strides;
10268 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10269 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10270 cam_stream_buf_plane_info_t buf_planes;
10271 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10272 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10273 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10274 &gCamCapability[cameraId]->padding_info, &buf_planes);
10275 strides.add(buf_planes.plane_info.mp[0].stride);
10276 }
10277 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10278 strides.size());
10279
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010280 //TBD: remove the following line once backend advertises zzHDR in feature mask
10281 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010282 //Video HDR default
10283 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10284 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010285 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010286 int32_t vhdr_mode[] = {
10287 QCAMERA3_VIDEO_HDR_MODE_OFF,
10288 QCAMERA3_VIDEO_HDR_MODE_ON};
10289
10290 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10291 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10292 vhdr_mode, vhdr_mode_count);
10293 }
10294
Thierry Strudel3d639192016-09-09 11:52:26 -070010295 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10296 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10297 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10298
10299 uint8_t isMonoOnly =
10300 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10301 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10302 &isMonoOnly, 1);
10303
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010304#ifndef USE_HAL_3_3
10305 Vector<int32_t> opaque_size;
10306 for (size_t j = 0; j < scalar_formats_count; j++) {
10307 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10308 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10309 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10310 cam_stream_buf_plane_info_t buf_planes;
10311
10312 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10313 &gCamCapability[cameraId]->padding_info, &buf_planes);
10314
10315 if (rc == 0) {
10316 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10317 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10318 opaque_size.add(buf_planes.plane_info.frame_len);
10319 } else {
10320 LOGE("raw frame calculation failed!");
10321 }
10322 }
10323 }
10324 }
10325
10326 if ((opaque_size.size() > 0) &&
10327 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10328 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10329 else
10330 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10331#endif
10332
Thierry Strudel04e026f2016-10-10 11:27:36 -070010333 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10334 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10335 size = 0;
10336 count = CAM_IR_MODE_MAX;
10337 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10338 for (size_t i = 0; i < count; i++) {
10339 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10340 gCamCapability[cameraId]->supported_ir_modes[i]);
10341 if (NAME_NOT_FOUND != val) {
10342 avail_ir_modes[size] = (int32_t)val;
10343 size++;
10344 }
10345 }
10346 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10347 avail_ir_modes, size);
10348 }
10349
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010350 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10351 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10352 size = 0;
10353 count = CAM_AEC_CONVERGENCE_MAX;
10354 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10355 for (size_t i = 0; i < count; i++) {
10356 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10357 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10358 if (NAME_NOT_FOUND != val) {
10359 available_instant_aec_modes[size] = (int32_t)val;
10360 size++;
10361 }
10362 }
10363 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10364 available_instant_aec_modes, size);
10365 }
10366
Thierry Strudel54dc9782017-02-15 12:12:10 -080010367 int32_t sharpness_range[] = {
10368 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10369 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10370 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10371
10372 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10373 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10374 size = 0;
10375 count = CAM_BINNING_CORRECTION_MODE_MAX;
10376 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10377 for (size_t i = 0; i < count; i++) {
10378 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10379 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10380 gCamCapability[cameraId]->supported_binning_modes[i]);
10381 if (NAME_NOT_FOUND != val) {
10382 avail_binning_modes[size] = (int32_t)val;
10383 size++;
10384 }
10385 }
10386 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10387 avail_binning_modes, size);
10388 }
10389
10390 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10391 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10392 size = 0;
10393 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10394 for (size_t i = 0; i < count; i++) {
10395 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10396 gCamCapability[cameraId]->supported_aec_modes[i]);
10397 if (NAME_NOT_FOUND != val)
10398 available_aec_modes[size++] = val;
10399 }
10400 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10401 available_aec_modes, size);
10402 }
10403
10404 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10405 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10406 size = 0;
10407 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10408 for (size_t i = 0; i < count; i++) {
10409 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10410 gCamCapability[cameraId]->supported_iso_modes[i]);
10411 if (NAME_NOT_FOUND != val)
10412 available_iso_modes[size++] = val;
10413 }
10414 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10415 available_iso_modes, size);
10416 }
10417
10418 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010419 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010420 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10421 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10422 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10423
10424 int32_t available_saturation_range[4];
10425 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10426 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10427 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10428 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10429 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10430 available_saturation_range, 4);
10431
10432 uint8_t is_hdr_values[2];
10433 is_hdr_values[0] = 0;
10434 is_hdr_values[1] = 1;
10435 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10436 is_hdr_values, 2);
10437
10438 float is_hdr_confidence_range[2];
10439 is_hdr_confidence_range[0] = 0.0;
10440 is_hdr_confidence_range[1] = 1.0;
10441 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10442 is_hdr_confidence_range, 2);
10443
Emilian Peev0a972ef2017-03-16 10:25:53 +000010444 size_t eepromLength = strnlen(
10445 reinterpret_cast<const char *>(
10446 gCamCapability[cameraId]->eeprom_version_info),
10447 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10448 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010449 char easelInfo[] = ",E:N";
10450 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10451 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10452 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010453 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10454 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010455 }
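// Illustrative note (version string assumed): after the append above, an EEPROM
// string such as "1.0.3" becomes "1.0.3,E:Y" when Easel is present on the device
// and "1.0.3,E:N" otherwise; that combined string is what gets published in
// NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO below.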
Emilian Peev0a972ef2017-03-16 10:25:53 +000010456 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10457 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10458 }
10459
Thierry Strudel3d639192016-09-09 11:52:26 -070010460 gStaticMetadata[cameraId] = staticInfo.release();
10461 return rc;
10462}
10463
10464/*===========================================================================
10465 * FUNCTION : makeTable
10466 *
10467 * DESCRIPTION: make a table of sizes
10468 *
10469 * PARAMETERS :
10470 *
10471 *
10472 *==========================================================================*/
10473void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10474 size_t max_size, int32_t *sizeTable)
10475{
10476 size_t j = 0;
10477 if (size > max_size) {
10478 size = max_size;
10479 }
10480 for (size_t i = 0; i < size; i++) {
10481 sizeTable[j] = dimTable[i].width;
10482 sizeTable[j+1] = dimTable[i].height;
10483 j+=2;
10484 }
10485}
10486
10487/*===========================================================================
10488 * FUNCTION : makeFPSTable
10489 *
10490 * DESCRIPTION: make a table of fps ranges
10491 *
10492 * PARAMETERS :
10493 *
10494 *==========================================================================*/
10495void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10496 size_t max_size, int32_t *fpsRangesTable)
10497{
10498 size_t j = 0;
10499 if (size > max_size) {
10500 size = max_size;
10501 }
10502 for (size_t i = 0; i < size; i++) {
10503 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10504 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10505 j+=2;
10506 }
10507}
10508
10509/*===========================================================================
10510 * FUNCTION : makeOverridesList
10511 *
10512 * DESCRIPTION: make a list of scene mode overrides
10513 *
10514 * PARAMETERS :
10515 *
10516 *
10517 *==========================================================================*/
10518void QCamera3HardwareInterface::makeOverridesList(
10519 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10520 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10521{
10522 /* The daemon gives a list of overrides for all scene modes.
10523 However, we should send the framework only the overrides for the scene modes
10524 that the framework actually supports. */
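/* Note on the packing (derived from the loop below): each supported scene mode
contributes one (aeMode, awbMode, afMode) triple to overridesList, so the list
ends up holding 3 * size entries laid out back to back. */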
10525 size_t j = 0;
10526 if (size > max_size) {
10527 size = max_size;
10528 }
10529 size_t focus_count = CAM_FOCUS_MODE_MAX;
10530 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10531 focus_count);
10532 for (size_t i = 0; i < size; i++) {
10533 bool supt = false;
10534 size_t index = supported_indexes[i];
10535 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10536 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10537 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10538 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10539 overridesTable[index].awb_mode);
10540 if (NAME_NOT_FOUND != val) {
10541 overridesList[j+1] = (uint8_t)val;
10542 }
10543 uint8_t focus_override = overridesTable[index].af_mode;
10544 for (size_t k = 0; k < focus_count; k++) {
10545 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10546 supt = true;
10547 break;
10548 }
10549 }
10550 if (supt) {
10551 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10552 focus_override);
10553 if (NAME_NOT_FOUND != val) {
10554 overridesList[j+2] = (uint8_t)val;
10555 }
10556 } else {
10557 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10558 }
10559 j+=3;
10560 }
10561}
10562
10563/*===========================================================================
10564 * FUNCTION : filterJpegSizes
10565 *
10566 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
10567 * the active array can be downscaled to by at most the given factor
10568 *
10569 * PARAMETERS :
10570 *
10571 * RETURN : length of jpegSizes array
10572 *==========================================================================*/
10573
10574size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10575 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10576 uint8_t downscale_factor)
10577{
10578 if (0 == downscale_factor) {
10579 downscale_factor = 1;
10580 }
10581
10582 int32_t min_width = active_array_size.width / downscale_factor;
10583 int32_t min_height = active_array_size.height / downscale_factor;
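// Illustrative example (numbers assumed): with a 4000x3000 active array and
// downscale_factor = 4, min_width/min_height become 1000/750, so only processed
// sizes of at least 1000x750 are copied into jpegSizes below.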
10584 size_t jpegSizesCnt = 0;
10585 if (processedSizesCnt > maxCount) {
10586 processedSizesCnt = maxCount;
10587 }
10588 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10589 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10590 jpegSizes[jpegSizesCnt] = processedSizes[i];
10591 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10592 jpegSizesCnt += 2;
10593 }
10594 }
10595 return jpegSizesCnt;
10596}
10597
10598/*===========================================================================
10599 * FUNCTION : computeNoiseModelEntryS
10600 *
10601 * DESCRIPTION: function to map a given sensitivity to the S noise
10602 * model parameters in the DNG noise model.
10603 *
10604 * PARAMETERS : sens : the sensor sensitivity
10605 *
10606 * RETURN : S (sensor amplification) noise
10607 *
10608 *==========================================================================*/
10609double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10610 double s = gCamCapability[mCameraId]->gradient_S * sens +
10611 gCamCapability[mCameraId]->offset_S;
10612 return ((s < 0.0) ? 0.0 : s);
10613}
10614
10615/*===========================================================================
10616 * FUNCTION : computeNoiseModelEntryO
10617 *
10618 * DESCRIPTION: function to map a given sensitivity to the O noise
10619 * model parameters in the DNG noise model.
10620 *
10621 * PARAMETERS : sens : the sensor sensitivity
10622 *
10623 * RETURN : O (sensor readout) noise
10624 *
10625 *==========================================================================*/
10626double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10627 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10628 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10629 1.0 : (1.0 * sens / max_analog_sens);
10630 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10631 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10632 return ((o < 0.0) ? 0.0 : o);
10633}
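// Background sketch (constants in the example are assumed, not taken from any
// sensor): together these two helpers parameterize the DNG noise model
// N(x) = sqrt(S * x + O) for a normalized pixel value x, with
// S = computeNoiseModelEntryS(sens) and O = computeNoiseModelEntryO(sens).
// For instance, with gradient_S = 3.0e-6, offset_S = 0 and sens = 100,
// S evaluates to 3.0e-4.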
10634
10635/*===========================================================================
10636 * FUNCTION : getSensorSensitivity
10637 *
10638 * DESCRIPTION: convert iso_mode to an integer value
10639 *
10640 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10641 *
10642 * RETURN : sensitivity supported by sensor
10643 *
10644 *==========================================================================*/
10645int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10646{
10647 int32_t sensitivity;
10648
10649 switch (iso_mode) {
10650 case CAM_ISO_MODE_100:
10651 sensitivity = 100;
10652 break;
10653 case CAM_ISO_MODE_200:
10654 sensitivity = 200;
10655 break;
10656 case CAM_ISO_MODE_400:
10657 sensitivity = 400;
10658 break;
10659 case CAM_ISO_MODE_800:
10660 sensitivity = 800;
10661 break;
10662 case CAM_ISO_MODE_1600:
10663 sensitivity = 1600;
10664 break;
10665 default:
10666 sensitivity = -1;
10667 break;
10668 }
10669 return sensitivity;
10670}
10671
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010672int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010673 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010674 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10675 // to connect to Easel.
10676 bool doNotpowerOnEasel =
10677 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10678
10679 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010680 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10681 return OK;
10682 }
10683
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010684 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010685 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010686 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010687 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010688 return res;
10689 }
10690
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010691 EaselManagerClientOpened = true;
10692
10693 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010694 if (res != OK) {
10695 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10696 }
10697
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010698 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010699 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010700
10701 // Expose enableZsl key only when HDR+ mode is enabled.
10702 gExposeEnableZslKey = !gEaselBypassOnly;
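// Usage sketch (assumed debug workflow, not required by this code): the two
// properties read above can be toggled from a shell for testing, e.g.
//   adb shell setprop persist.camera.hdrplus.enable 1
//   adb shell setprop persist.camera.hdrplus.profiling 1
// Leaving hdrplus.enable unset keeps Easel in bypass-only mode, in which case the
// ANDROID_CONTROL_ENABLE_ZSL key is not exposed to the framework.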
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010703 }
10704
10705 return OK;
10706}
10707
Thierry Strudel3d639192016-09-09 11:52:26 -070010708/*===========================================================================
10709 * FUNCTION : getCamInfo
10710 *
10711 * DESCRIPTION: query camera capabilities
10712 *
10713 * PARAMETERS :
10714 * @cameraId : camera Id
10715 * @info : camera info struct to be filled in with camera capabilities
10716 *
10717 * RETURN : int type of status
10718 * NO_ERROR -- success
10719 * none-zero failure code
10720 *==========================================================================*/
10721int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10722 struct camera_info *info)
10723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010724 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010725 int rc = 0;
10726
10727 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010728
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010729 {
10730 Mutex::Autolock l(gHdrPlusClientLock);
10731 rc = initHdrPlusClientLocked();
10732 if (rc != OK) {
10733 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10734 pthread_mutex_unlock(&gCamLock);
10735 return rc;
10736 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010737 }
10738
Thierry Strudel3d639192016-09-09 11:52:26 -070010739 if (NULL == gCamCapability[cameraId]) {
10740 rc = initCapabilities(cameraId);
10741 if (rc < 0) {
10742 pthread_mutex_unlock(&gCamLock);
10743 return rc;
10744 }
10745 }
10746
10747 if (NULL == gStaticMetadata[cameraId]) {
10748 rc = initStaticMetadata(cameraId);
10749 if (rc < 0) {
10750 pthread_mutex_unlock(&gCamLock);
10751 return rc;
10752 }
10753 }
10754
10755 switch(gCamCapability[cameraId]->position) {
10756 case CAM_POSITION_BACK:
10757 case CAM_POSITION_BACK_AUX:
10758 info->facing = CAMERA_FACING_BACK;
10759 break;
10760
10761 case CAM_POSITION_FRONT:
10762 case CAM_POSITION_FRONT_AUX:
10763 info->facing = CAMERA_FACING_FRONT;
10764 break;
10765
10766 default:
10767 LOGE("Unknown position type %d for camera id:%d",
10768 gCamCapability[cameraId]->position, cameraId);
10769 rc = -1;
10770 break;
10771 }
10772
10773
10774 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010775#ifndef USE_HAL_3_3
10776 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10777#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010778 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010779#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010780 info->static_camera_characteristics = gStaticMetadata[cameraId];
10781
10782 //For now assume both cameras can operate independently.
10783 info->conflicting_devices = NULL;
10784 info->conflicting_devices_length = 0;
10785
10786 //resource cost is 100 * MIN(1.0, m/M),
10787 //where m is throughput requirement with maximum stream configuration
10788 //and M is CPP maximum throughput.
10789 float max_fps = 0.0;
10790 for (uint32_t i = 0;
10791 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10792 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10793 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10794 }
10795 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10796 gCamCapability[cameraId]->active_array_size.width *
10797 gCamCapability[cameraId]->active_array_size.height * max_fps /
10798 gCamCapability[cameraId]->max_pixel_bandwidth;
10799 info->resource_cost = 100 * MIN(1.0, ratio);
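// Worked example (capability values assumed): with MAX_PROCESSED_STREAMS = 3, a
// 4000x3000 active array, max_fps = 30 and max_pixel_bandwidth = 1.2e9,
// ratio = 3 * 4000 * 3000 * 30 / 1.2e9 = 0.9, giving a resource cost of 90.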
10800 LOGI("camera %d resource cost is %d", cameraId,
10801 info->resource_cost);
10802
10803 pthread_mutex_unlock(&gCamLock);
10804 return rc;
10805}
10806
10807/*===========================================================================
10808 * FUNCTION : translateCapabilityToMetadata
10809 *
10810 * DESCRIPTION: translate the capability into camera_metadata_t
10811 *
10812 * PARAMETERS : type of the request
10813 *
10814 *
10815 * RETURN : success: camera_metadata_t*
10816 * failure: NULL
10817 *
10818 *==========================================================================*/
10819camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10820{
10821 if (mDefaultMetadata[type] != NULL) {
10822 return mDefaultMetadata[type];
10823 }
10824 //first time we are handling this request
10825 //fill up the metadata structure using the wrapper class
10826 CameraMetadata settings;
10827 //translate from cam_capability_t to camera_metadata_tag_t
10828 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10829 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10830 int32_t defaultRequestID = 0;
10831 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10832
10833 /* OIS disable */
10834 char ois_prop[PROPERTY_VALUE_MAX];
10835 memset(ois_prop, 0, sizeof(ois_prop));
10836 property_get("persist.camera.ois.disable", ois_prop, "0");
10837 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10838
10839 /* Force video to use OIS */
10840 char videoOisProp[PROPERTY_VALUE_MAX];
10841 memset(videoOisProp, 0, sizeof(videoOisProp));
10842 property_get("persist.camera.ois.video", videoOisProp, "1");
10843 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010844
10845 // Hybrid AE enable/disable
10846 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10847 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10848 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10849 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
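// Sketch (assumed debug workflow): hybrid AE stays disabled unless the property is
// set, e.g. via "adb shell setprop persist.camera.hybrid_ae.enable 1", since the
// default here is "0".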
10850
Thierry Strudel3d639192016-09-09 11:52:26 -070010851 uint8_t controlIntent = 0;
10852 uint8_t focusMode;
10853 uint8_t vsMode;
10854 uint8_t optStabMode;
10855 uint8_t cacMode;
10856 uint8_t edge_mode;
10857 uint8_t noise_red_mode;
10858 uint8_t tonemap_mode;
10859 bool highQualityModeEntryAvailable = FALSE;
10860 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010861 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010862 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10863 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010864 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010865 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010866 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010867
Thierry Strudel3d639192016-09-09 11:52:26 -070010868 switch (type) {
10869 case CAMERA3_TEMPLATE_PREVIEW:
10870 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10871 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10872 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10873 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10874 edge_mode = ANDROID_EDGE_MODE_FAST;
10875 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10876 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10877 break;
10878 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10879 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10880 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10881 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10882 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10883 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10884 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10885 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10886 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10887 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10888 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10889 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10890 highQualityModeEntryAvailable = TRUE;
10891 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10892 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10893 fastModeEntryAvailable = TRUE;
10894 }
10895 }
10896 if (highQualityModeEntryAvailable) {
10897 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10898 } else if (fastModeEntryAvailable) {
10899 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10900 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010901 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10902 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10903 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010904 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010905 break;
10906 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10907 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10908 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10909 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010910 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10911 edge_mode = ANDROID_EDGE_MODE_FAST;
10912 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10913 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10914 if (forceVideoOis)
10915 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10916 break;
10917 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10918 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10919 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10920 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010921 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10922 edge_mode = ANDROID_EDGE_MODE_FAST;
10923 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10924 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10925 if (forceVideoOis)
10926 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10927 break;
10928 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10929 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10930 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10931 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10932 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10933 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10934 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10935 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10936 break;
10937 case CAMERA3_TEMPLATE_MANUAL:
10938 edge_mode = ANDROID_EDGE_MODE_FAST;
10939 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10940 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10941 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10942 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10943 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10944 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10945 break;
10946 default:
10947 edge_mode = ANDROID_EDGE_MODE_FAST;
10948 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10949 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10950 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10951 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10952 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10953 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10954 break;
10955 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010956 // Set CAC to OFF if underlying device doesn't support
10957 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10958 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10959 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010960 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10961 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10962 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10963 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10964 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10965 }
10966 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010967 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010968 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010969
10970 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10971 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10972 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10973 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10974 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10975 || ois_disable)
10976 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10977 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010978 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010979
10980 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10981 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10982
10983 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10984 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10985
10986 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10987 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10988
10989 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10990 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10991
10992 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10993 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10994
10995 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10996 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10997
10998 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10999 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11000
11001 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11002 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11003
11004 /*flash*/
11005 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11006 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11007
11008 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11009 settings.update(ANDROID_FLASH_FIRING_POWER,
11010 &flashFiringLevel, 1);
11011
11012 /* lens */
11013 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11014 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11015
11016 if (gCamCapability[mCameraId]->filter_densities_count) {
11017 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11018 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11019 gCamCapability[mCameraId]->filter_densities_count);
11020 }
11021
11022 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11023 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11024
Thierry Strudel3d639192016-09-09 11:52:26 -070011025 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11026 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11027
11028 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11029 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11030
11031 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11032 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11033
11034 /* face detection (default to OFF) */
11035 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11036 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11037
Thierry Strudel54dc9782017-02-15 12:12:10 -080011038 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11039 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011040
11041 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11042 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11043
11044 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11045 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11046
Thierry Strudel3d639192016-09-09 11:52:26 -070011047
11048 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11049 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11050
11051 /* Exposure time (default to the minimum exposure time) */
11052 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11053 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11054
11055 /* frame duration */
11056 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11057 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11058
11059 /* sensitivity */
11060 static const int32_t default_sensitivity = 100;
11061 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011062#ifndef USE_HAL_3_3
11063 static const int32_t default_isp_sensitivity =
11064 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11065 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11066#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011067
11068 /*edge mode*/
11069 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11070
11071 /*noise reduction mode*/
11072 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11073
11074 /*color correction mode*/
11075 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11076 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11077
11078 /*transform matrix mode*/
11079 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11080
11081 int32_t scaler_crop_region[4];
11082 scaler_crop_region[0] = 0;
11083 scaler_crop_region[1] = 0;
11084 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11085 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11086 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11087
11088 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11089 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11090
11091 /*focus distance*/
11092 float focus_distance = 0.0;
11093 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11094
11095 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011096 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011097 float max_range = 0.0;
11098 float max_fixed_fps = 0.0;
11099 int32_t fps_range[2] = {0, 0};
11100 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11101 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011102 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11103 TEMPLATE_MAX_PREVIEW_FPS) {
11104 continue;
11105 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011106 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11107 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11108 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11109 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11110 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11111 if (range > max_range) {
11112 fps_range[0] =
11113 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11114 fps_range[1] =
11115 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11116 max_range = range;
11117 }
11118 } else {
11119 if (range < 0.01 && max_fixed_fps <
11120 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11121 fps_range[0] =
11122 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11123 fps_range[1] =
11124 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11125 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11126 }
11127 }
11128 }
11129 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11130
11131 /*precapture trigger*/
11132 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11133 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11134
11135 /*af trigger*/
11136 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11137 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11138
11139 /* ae & af regions */
11140 int32_t active_region[] = {
11141 gCamCapability[mCameraId]->active_array_size.left,
11142 gCamCapability[mCameraId]->active_array_size.top,
11143 gCamCapability[mCameraId]->active_array_size.left +
11144 gCamCapability[mCameraId]->active_array_size.width,
11145 gCamCapability[mCameraId]->active_array_size.top +
11146 gCamCapability[mCameraId]->active_array_size.height,
11147 0};
11148 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11149 sizeof(active_region) / sizeof(active_region[0]));
11150 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11151 sizeof(active_region) / sizeof(active_region[0]));
11152
11153 /* black level lock */
11154 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11155 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11156
Thierry Strudel3d639192016-09-09 11:52:26 -070011157 //special defaults for manual template
11158 if (type == CAMERA3_TEMPLATE_MANUAL) {
11159 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11160 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11161
11162 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11163 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11164
11165 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11166 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11167
11168 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11169 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11170
11171 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11172 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11173
11174 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11175 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11176 }
11177
11178
11179 /* TNR
11180 * This is where we decide for which templates TNR is enabled.
11181 * TNR is turned on if either the preview or the video stream requires it.
11182 * This is not to be confused with per-stream linking; that decision is
11183 * still made per session and is handled as part of stream configuration.
11184 */
11185 uint8_t tnr_enable = 0;
11186
11187 if (m_bTnrPreview || m_bTnrVideo) {
11188
11189 switch (type) {
11190 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11191 tnr_enable = 1;
11192 break;
11193
11194 default:
11195 tnr_enable = 0;
11196 break;
11197 }
11198
11199 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11200 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11201 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11202
11203 LOGD("TNR:%d with process plate %d for template:%d",
11204 tnr_enable, tnr_process_type, type);
11205 }
11206
11207 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011208 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011209 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11210
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011211 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011212 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11213
Shuzhen Wang920ea402017-05-03 08:49:39 -070011214 uint8_t related_camera_id = mCameraId;
11215 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011216
11217 /* CDS default */
11218 char prop[PROPERTY_VALUE_MAX];
11219 memset(prop, 0, sizeof(prop));
11220 property_get("persist.camera.CDS", prop, "Auto");
11221 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11222 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11223 if (CAM_CDS_MODE_MAX == cds_mode) {
11224 cds_mode = CAM_CDS_MODE_AUTO;
11225 }
11226
11227 /* Disabling CDS in templates which have TNR enabled*/
11228 if (tnr_enable)
11229 cds_mode = CAM_CDS_MODE_OFF;
11230
11231 int32_t mode = cds_mode;
11232 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011233
Thierry Strudel269c81a2016-10-12 12:13:59 -070011234 /* Manual Convergence AEC Speed is disabled by default*/
11235 float default_aec_speed = 0;
11236 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11237
11238 /* Manual Convergence AWB Speed is disabled by default*/
11239 float default_awb_speed = 0;
11240 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11241
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011242 // Set instant AEC to normal convergence by default
11243 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11244 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11245
Shuzhen Wang19463d72016-03-08 11:09:52 -080011246 /* hybrid ae */
11247 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11248
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011249 if (gExposeEnableZslKey) {
11250 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11251 }
11252
Thierry Strudel3d639192016-09-09 11:52:26 -070011253 mDefaultMetadata[type] = settings.release();
11254
11255 return mDefaultMetadata[type];
11256}
11257
11258/*===========================================================================
11259 * FUNCTION : setFrameParameters
11260 *
11261 * DESCRIPTION: set parameters per frame as requested in the metadata from
11262 * framework
11263 *
11264 * PARAMETERS :
11265 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011266 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011267 * @blob_request: Whether this request is a blob request or not
11268 *
11269 * RETURN : success: NO_ERROR
11270 * failure:
11271 *==========================================================================*/
11272int QCamera3HardwareInterface::setFrameParameters(
11273 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011274 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011275 int blob_request,
11276 uint32_t snapshotStreamId)
11277{
11278 /*translate from camera_metadata_t type to parm_type_t*/
11279 int rc = 0;
11280 int32_t hal_version = CAM_HAL_V3;
11281
11282 clear_metadata_buffer(mParameters);
11283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11284 LOGE("Failed to set hal version in the parameters");
11285 return BAD_VALUE;
11286 }
11287
11288 /*we need to update the frame number in the parameters*/
11289 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11290 request->frame_number)) {
11291 LOGE("Failed to set the frame number in the parameters");
11292 return BAD_VALUE;
11293 }
11294
11295 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011296 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011297 LOGE("Failed to set stream type mask in the parameters");
11298 return BAD_VALUE;
11299 }
11300
11301 if (mUpdateDebugLevel) {
11302 uint32_t dummyDebugLevel = 0;
11303 /* The value of dummyDebugLevel is irrelevant. On
11304 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11306 dummyDebugLevel)) {
11307 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11308 return BAD_VALUE;
11309 }
11310 mUpdateDebugLevel = false;
11311 }
11312
11313 if(request->settings != NULL){
11314 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11315 if (blob_request)
11316 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11317 }
11318
11319 return rc;
11320}
11321
11322/*===========================================================================
11323 * FUNCTION : setReprocParameters
11324 *
11325 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11326 * return it.
11327 *
11328 * PARAMETERS :
11329 * @request : request that needs to be serviced
11330 *
11331 * RETURN : success: NO_ERROR
11332 * failure:
11333 *==========================================================================*/
11334int32_t QCamera3HardwareInterface::setReprocParameters(
11335 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11336 uint32_t snapshotStreamId)
11337{
11338 /*translate from camera_metadata_t type to parm_type_t*/
11339 int rc = 0;
11340
11341 if (NULL == request->settings){
11342 LOGE("Reprocess settings cannot be NULL");
11343 return BAD_VALUE;
11344 }
11345
11346 if (NULL == reprocParam) {
11347 LOGE("Invalid reprocessing metadata buffer");
11348 return BAD_VALUE;
11349 }
11350 clear_metadata_buffer(reprocParam);
11351
11352 /*we need to update the frame number in the parameters*/
11353 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11354 request->frame_number)) {
11355 LOGE("Failed to set the frame number in the parameters");
11356 return BAD_VALUE;
11357 }
11358
11359 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11360 if (rc < 0) {
11361 LOGE("Failed to translate reproc request");
11362 return rc;
11363 }
11364
11365 CameraMetadata frame_settings;
11366 frame_settings = request->settings;
11367 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11368 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11369 int32_t *crop_count =
11370 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11371 int32_t *crop_data =
11372 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11373 int32_t *roi_map =
11374 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11375 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11376 cam_crop_data_t crop_meta;
11377 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11378 crop_meta.num_of_streams = 1;
11379 crop_meta.crop_info[0].crop.left = crop_data[0];
11380 crop_meta.crop_info[0].crop.top = crop_data[1];
11381 crop_meta.crop_info[0].crop.width = crop_data[2];
11382 crop_meta.crop_info[0].crop.height = crop_data[3];
11383
11384 crop_meta.crop_info[0].roi_map.left =
11385 roi_map[0];
11386 crop_meta.crop_info[0].roi_map.top =
11387 roi_map[1];
11388 crop_meta.crop_info[0].roi_map.width =
11389 roi_map[2];
11390 crop_meta.crop_info[0].roi_map.height =
11391 roi_map[3];
11392
11393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11394 rc = BAD_VALUE;
11395 }
11396 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11397 request->input_buffer->stream,
11398 crop_meta.crop_info[0].crop.left,
11399 crop_meta.crop_info[0].crop.top,
11400 crop_meta.crop_info[0].crop.width,
11401 crop_meta.crop_info[0].crop.height);
11402 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11403 request->input_buffer->stream,
11404 crop_meta.crop_info[0].roi_map.left,
11405 crop_meta.crop_info[0].roi_map.top,
11406 crop_meta.crop_info[0].roi_map.width,
11407 crop_meta.crop_info[0].roi_map.height);
11408 } else {
11409 LOGE("Invalid reprocess crop count %d!", *crop_count);
11410 }
11411 } else {
11412 LOGE("No crop data from matching output stream");
11413 }
11414
11415 /* These settings are not needed for regular requests so handle them specially for
11416 reprocess requests; information needed for EXIF tags */
11417 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11418 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11419 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11420 if (NAME_NOT_FOUND != val) {
11421 uint32_t flashMode = (uint32_t)val;
11422 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11423 rc = BAD_VALUE;
11424 }
11425 } else {
11426 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11427 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11428 }
11429 } else {
11430 LOGH("No flash mode in reprocess settings");
11431 }
11432
11433 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11434 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11436 rc = BAD_VALUE;
11437 }
11438 } else {
11439 LOGH("No flash state in reprocess settings");
11440 }
11441
11442 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11443 uint8_t *reprocessFlags =
11444 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11445 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11446 *reprocessFlags)) {
11447 rc = BAD_VALUE;
11448 }
11449 }
11450
Thierry Strudel54dc9782017-02-15 12:12:10 -080011451 // Add exif debug data to internal metadata
11452 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11453 mm_jpeg_debug_exif_params_t *debug_params =
11454 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11455 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11456 // AE
11457 if (debug_params->ae_debug_params_valid == TRUE) {
11458 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11459 debug_params->ae_debug_params);
11460 }
11461 // AWB
11462 if (debug_params->awb_debug_params_valid == TRUE) {
11463 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11464 debug_params->awb_debug_params);
11465 }
11466 // AF
11467 if (debug_params->af_debug_params_valid == TRUE) {
11468 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11469 debug_params->af_debug_params);
11470 }
11471 // ASD
11472 if (debug_params->asd_debug_params_valid == TRUE) {
11473 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11474 debug_params->asd_debug_params);
11475 }
11476 // Stats
11477 if (debug_params->stats_debug_params_valid == TRUE) {
11478 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11479 debug_params->stats_debug_params);
11480 }
11481 // BE Stats
11482 if (debug_params->bestats_debug_params_valid == TRUE) {
11483 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11484 debug_params->bestats_debug_params);
11485 }
11486 // BHIST
11487 if (debug_params->bhist_debug_params_valid == TRUE) {
11488 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11489 debug_params->bhist_debug_params);
11490 }
11491 // 3A Tuning
11492 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11493 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11494 debug_params->q3a_tuning_debug_params);
11495 }
11496 }
11497
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011498 // Add metadata which reprocess needs
11499 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11500 cam_reprocess_info_t *repro_info =
11501 (cam_reprocess_info_t *)frame_settings.find
11502 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011503 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011504 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011505 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011506 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011507 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011508 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011509 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011510 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011511 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011512 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011513 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011514 repro_info->pipeline_flip);
11515 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11516 repro_info->af_roi);
11517 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11518 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011519 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, then
11520 CAM_INTF_PARM_ROTATION metadata has already been added in
11521 translateToHalMetadata and HAL needs to keep that new rotation
11522 metadata. Otherwise, the old rotation info saved in the vendor tag
11523 is used. */
11524 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11525 CAM_INTF_PARM_ROTATION, reprocParam) {
11526 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11527 } else {
11528 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011529 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011530 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011531 }
11532
11533 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11534 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11535 roi.width and roi.height will be the final JPEG size.
11536 For now, HAL only checks this for reprocess requests. */
11537 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11538 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11539 uint8_t *enable =
11540 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11541 if (*enable == TRUE) {
11542 int32_t *crop_data =
11543 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11544 cam_stream_crop_info_t crop_meta;
11545 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11546 crop_meta.stream_id = 0;
11547 crop_meta.crop.left = crop_data[0];
11548 crop_meta.crop.top = crop_data[1];
11549 crop_meta.crop.width = crop_data[2];
11550 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011551 // The JPEG crop roi should match cpp output size
11552 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11553 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11554 crop_meta.roi_map.left = 0;
11555 crop_meta.roi_map.top = 0;
11556 crop_meta.roi_map.width = cpp_crop->crop.width;
11557 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011558 }
11559 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11560 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011561 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011562 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011563 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11564 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011565 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011566 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11567
11568 // Add JPEG scale information
11569 cam_dimension_t scale_dim;
11570 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11571 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11572 int32_t *roi =
11573 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11574 scale_dim.width = roi[2];
11575 scale_dim.height = roi[3];
11576 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11577 scale_dim);
11578 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11579 scale_dim.width, scale_dim.height, mCameraId);
11580 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011581 }
11582 }
11583
11584 return rc;
11585}
11586
11587/*===========================================================================
11588 * FUNCTION : saveRequestSettings
11589 *
11590 * DESCRIPTION: Add any settings that might have changed to the request settings
11591 * and save the settings to be applied on the frame
11592 *
11593 * PARAMETERS :
11594 * @jpegMetadata : the extracted and/or modified jpeg metadata
11595 * @request : request with initial settings
11596 *
11597 * RETURN :
11598 * camera_metadata_t* : pointer to the saved request settings
11599 *==========================================================================*/
11600camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11601 const CameraMetadata &jpegMetadata,
11602 camera3_capture_request_t *request)
11603{
11604 camera_metadata_t *resultMetadata;
11605 CameraMetadata camMetadata;
11606 camMetadata = request->settings;
11607
11608 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11609 int32_t thumbnail_size[2];
11610 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11611 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11612 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11613 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11614 }
11615
11616 if (request->input_buffer != NULL) {
11617 uint8_t reprocessFlags = 1;
11618 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11619 (uint8_t*)&reprocessFlags,
11620 sizeof(reprocessFlags));
11621 }
11622
11623 resultMetadata = camMetadata.release();
11624 return resultMetadata;
11625}
11626
11627/*===========================================================================
11628 * FUNCTION : setHalFpsRange
11629 *
11630 * DESCRIPTION: set FPS range parameter
11631 *
11632 *
11633 * PARAMETERS :
11634 * @settings : Metadata from framework
11635 * @hal_metadata: Metadata buffer
11636 *
11637 *
11638 * RETURN : success: NO_ERROR
11639 * failure:
11640 *==========================================================================*/
11641int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11642 metadata_buffer_t *hal_metadata)
11643{
11644 int32_t rc = NO_ERROR;
11645 cam_fps_range_t fps_range;
11646 fps_range.min_fps = (float)
11647 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11648 fps_range.max_fps = (float)
11649 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11650 fps_range.video_min_fps = fps_range.min_fps;
11651 fps_range.video_max_fps = fps_range.max_fps;
11652
11653 LOGD("aeTargetFpsRange fps: [%f %f]",
11654 fps_range.min_fps, fps_range.max_fps);
11655 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11656 * follows:
11657 * ---------------------------------------------------------------|
11658 * Video stream is absent in configure_streams |
11659 * (Camcorder preview before the first video record) |
11660 * ---------------------------------------------------------------|
11661 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11662 * | | | vid_min/max_fps|
11663 * ---------------------------------------------------------------|
11664 * NO | [ 30, 240] | 240 | [240, 240] |
11665 * |-------------|-------------|----------------|
11666 * | [240, 240] | 240 | [240, 240] |
11667 * ---------------------------------------------------------------|
11668 * Video stream is present in configure_streams |
11669 * ---------------------------------------------------------------|
11670 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11671 * | | | vid_min/max_fps|
11672 * ---------------------------------------------------------------|
11673 * NO | [ 30, 240] | 240 | [240, 240] |
11674 * (camcorder prev |-------------|-------------|----------------|
11675 * after video rec | [240, 240] | 240 | [240, 240] |
11676 * is stopped) | | | |
11677 * ---------------------------------------------------------------|
11678 * YES | [ 30, 240] | 240 | [240, 240] |
11679 * |-------------|-------------|----------------|
11680 * | [240, 240] | 240 | [240, 240] |
11681 * ---------------------------------------------------------------|
11682 * When Video stream is absent in configure_streams,
11683 * preview fps = sensor_fps / batchsize
11684 * Eg: for 240fps at batchSize 4, preview = 60fps
11685 * for 120fps at batchSize 4, preview = 30fps
11686 *
11687 * When video stream is present in configure_streams, preview fps is as per
11688 * the ratio of preview buffers to video buffers requested in process
11689 * capture request
11690 */
11691 mBatchSize = 0;
11692 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11693 fps_range.min_fps = fps_range.video_max_fps;
11694 fps_range.video_min_fps = fps_range.video_max_fps;
11695 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11696 fps_range.max_fps);
11697 if (NAME_NOT_FOUND != val) {
11698 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11700 return BAD_VALUE;
11701 }
11702
11703 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11704 /* If batchmode is currently in progress and the fps changes,
11705 * set the flag to restart the sensor */
11706 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11707 (mHFRVideoFps != fps_range.max_fps)) {
11708 mNeedSensorRestart = true;
11709 }
11710 mHFRVideoFps = fps_range.max_fps;
11711 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11712 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11713 mBatchSize = MAX_HFR_BATCH_SIZE;
11714 }
11715 }
11716 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11717
11718 }
11719 } else {
11720 /* HFR mode is session param in backend/ISP. This should be reset when
11721 * in non-HFR mode */
11722 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11724 return BAD_VALUE;
11725 }
11726 }
11727 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11728 return BAD_VALUE;
11729 }
11730 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11731 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11732 return rc;
11733}
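/* Illustrative sketch, not called by the HAL: the batch-size derivation used in
 * setHalFpsRange() written out stand-alone. The constants below are inferred from the
 * examples in the table above ("for 240fps at batchSize 4, preview = 60fps"); the real
 * code uses PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE, which may differ.
 *
 *     static uint32_t exampleHfrBatchSize(float maxFps)
 *     {
 *         const float    kPreviewFpsForHfr = 30.0f; // assumed PREVIEW_FPS_FOR_HFR
 *         const uint32_t kMaxHfrBatchSize  = 4;     // assumed MAX_HFR_BATCH_SIZE
 *         uint32_t batch = (uint32_t)(maxFps / kPreviewFpsForHfr);
 *         return (batch > kMaxHfrBatchSize) ? kMaxHfrBatchSize : batch;
 *     }
 *
 * With these assumptions, a [240, 240] aeTargetFpsRange gives batch = 8, clamped to 4,
 * so preview runs at 240 / 4 = 60 fps; a [120, 120] range gives batch = 4 and a 30 fps
 * preview, matching the table.
 */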
11734
11735/*===========================================================================
11736 * FUNCTION : translateToHalMetadata
11737 *
11738 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11739 *
11740 *
11741 * PARAMETERS :
11742 * @request : request sent from framework
11743 *
11744 *
11745 * RETURN : success: NO_ERROR
11746 * failure: BAD_VALUE
11747 *==========================================================================*/
11748int QCamera3HardwareInterface::translateToHalMetadata
11749 (const camera3_capture_request_t *request,
11750 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011751 uint32_t snapshotStreamId) {
11752 if (request == nullptr || hal_metadata == nullptr) {
11753 return BAD_VALUE;
11754 }
11755
11756 int64_t minFrameDuration = getMinFrameDuration(request);
11757
11758 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11759 minFrameDuration);
11760}
11761
11762int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11763 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11764 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11765
Thierry Strudel3d639192016-09-09 11:52:26 -070011766 int rc = 0;
11767 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011768 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011769
11770 /* Do not change the order of the following list unless you know what you are
11771 * doing.
11772 * The order is laid out in such a way that parameters in the front of the table
11773 * may be used to override the parameters later in the table. Examples are:
11774 * 1. META_MODE should precede AEC/AWB/AF MODE
11775 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11776 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11777 * 4. Any mode should precede its corresponding settings
11778 */
11779 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11780 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11781 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11782 rc = BAD_VALUE;
11783 }
11784 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11785 if (rc != NO_ERROR) {
11786 LOGE("extractSceneMode failed");
11787 }
11788 }
11789
11790 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11791 uint8_t fwk_aeMode =
11792 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11793 uint8_t aeMode;
11794 int32_t redeye;
11795
11796 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11797 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011798 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11799 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011800 } else {
11801 aeMode = CAM_AE_MODE_ON;
11802 }
11803 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11804 redeye = 1;
11805 } else {
11806 redeye = 0;
11807 }
11808
11809 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11810 fwk_aeMode);
11811 if (NAME_NOT_FOUND != val) {
11812 int32_t flashMode = (int32_t)val;
11813 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11814 }
11815
11816 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11818 rc = BAD_VALUE;
11819 }
11820 }
11821
11822 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11823 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11824 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11825 fwk_whiteLevel);
11826 if (NAME_NOT_FOUND != val) {
11827 uint8_t whiteLevel = (uint8_t)val;
11828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11829 rc = BAD_VALUE;
11830 }
11831 }
11832 }
11833
11834 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11835 uint8_t fwk_cacMode =
11836 frame_settings.find(
11837 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11838 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11839 fwk_cacMode);
11840 if (NAME_NOT_FOUND != val) {
11841 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11842 bool entryAvailable = FALSE;
11843             // Check whether the framework-requested CAC mode is supported by the device
11844 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11845 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11846 entryAvailable = TRUE;
11847 break;
11848 }
11849 }
11850 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11851             // If the entry is not found, fall back to a device-supported mode instead of the framework mode, i.e.:
11852             // Only HW ISP CAC + no SW CAC : advertise all 3 modes, with HIGH behaving the same as FAST in the ISP
11853             // No HW ISP CAC + only SW CAC : advertise all 3 modes, with FAST behaving the same as OFF
11854 if (entryAvailable == FALSE) {
11855 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11856 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11857 } else {
11858 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11859                     // HIGH is not supported, so fall back to FAST; the spec says the underlying
11860                     // device implementation may be the same for both modes.
11861 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11862 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11863                     // FAST is not supported, so neither HIGH nor FAST can be set; choose OFF
11864                     // to avoid the fps drop caused by high-quality processing
11865 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11866 } else {
11867 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11868 }
11869 }
11870 }
11871 LOGD("Final cacMode is %d", cacMode);
11872 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11873 rc = BAD_VALUE;
11874 }
11875 } else {
11876 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11877 }
11878 }
11879
Thierry Strudel2896d122017-02-23 19:18:03 -080011880 char af_value[PROPERTY_VALUE_MAX];
11881 property_get("persist.camera.af.infinity", af_value, "0");
11882
Jason Lee84ae9972017-02-24 13:24:24 -080011883 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011884 if (atoi(af_value) == 0) {
11885 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011886 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011887 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11888 fwk_focusMode);
11889 if (NAME_NOT_FOUND != val) {
11890 uint8_t focusMode = (uint8_t)val;
11891 LOGD("set focus mode %d", focusMode);
11892 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11893 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11894 rc = BAD_VALUE;
11895 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011896 }
11897 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011898 } else {
11899 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11900 LOGE("Focus forced to infinity %d", focusMode);
11901 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11902 rc = BAD_VALUE;
11903 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011904 }
11905
Jason Lee84ae9972017-02-24 13:24:24 -080011906 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11907 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011908 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11910 focalDistance)) {
11911 rc = BAD_VALUE;
11912 }
11913 }
11914
11915 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11916 uint8_t fwk_antibandingMode =
11917 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11918 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11919 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11920 if (NAME_NOT_FOUND != val) {
11921 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011922 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11923 if (m60HzZone) {
11924 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11925 } else {
11926 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11927 }
11928 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011929 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11930 hal_antibandingMode)) {
11931 rc = BAD_VALUE;
11932 }
11933 }
11934 }
11935
11936 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11937 int32_t expCompensation = frame_settings.find(
11938 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11939 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11940 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11941 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11942 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011943 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011944 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11945 expCompensation)) {
11946 rc = BAD_VALUE;
11947 }
11948 }
11949
11950 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11951 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11952 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11953 rc = BAD_VALUE;
11954 }
11955 }
11956 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11957 rc = setHalFpsRange(frame_settings, hal_metadata);
11958 if (rc != NO_ERROR) {
11959 LOGE("setHalFpsRange failed");
11960 }
11961 }
11962
11963 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11964 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11965 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11966 rc = BAD_VALUE;
11967 }
11968 }
11969
11970 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11971 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11972 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11973 fwk_effectMode);
11974 if (NAME_NOT_FOUND != val) {
11975 uint8_t effectMode = (uint8_t)val;
11976 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11977 rc = BAD_VALUE;
11978 }
11979 }
11980 }
11981
11982 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11983 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11984 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11985 colorCorrectMode)) {
11986 rc = BAD_VALUE;
11987 }
11988 }
11989
11990 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11991 cam_color_correct_gains_t colorCorrectGains;
11992 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11993 colorCorrectGains.gains[i] =
11994 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11995 }
11996 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11997 colorCorrectGains)) {
11998 rc = BAD_VALUE;
11999 }
12000 }
12001
12002 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12003 cam_color_correct_matrix_t colorCorrectTransform;
12004 cam_rational_type_t transform_elem;
12005 size_t num = 0;
12006 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12007 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12008 transform_elem.numerator =
12009 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12010 transform_elem.denominator =
12011 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12012 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12013 num++;
12014 }
12015 }
12016 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12017 colorCorrectTransform)) {
12018 rc = BAD_VALUE;
12019 }
12020 }
12021
12022 cam_trigger_t aecTrigger;
12023 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12024 aecTrigger.trigger_id = -1;
12025 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12026 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12027 aecTrigger.trigger =
12028 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12029 aecTrigger.trigger_id =
12030 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12031 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12032 aecTrigger)) {
12033 rc = BAD_VALUE;
12034 }
12035 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12036 aecTrigger.trigger, aecTrigger.trigger_id);
12037 }
12038
12039 /*af_trigger must come with a trigger id*/
12040 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12041 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12042 cam_trigger_t af_trigger;
12043 af_trigger.trigger =
12044 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12045 af_trigger.trigger_id =
12046 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12048 rc = BAD_VALUE;
12049 }
12050 LOGD("AfTrigger: %d AfTriggerID: %d",
12051 af_trigger.trigger, af_trigger.trigger_id);
12052 }
12053
12054 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12055 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12056 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12057 rc = BAD_VALUE;
12058 }
12059 }
12060 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12061 cam_edge_application_t edge_application;
12062 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012063
Thierry Strudel3d639192016-09-09 11:52:26 -070012064 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12065 edge_application.sharpness = 0;
12066 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012067 edge_application.sharpness =
12068 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12069 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12070 int32_t sharpness =
12071 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12072 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12073 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12074 LOGD("Setting edge mode sharpness %d", sharpness);
12075 edge_application.sharpness = sharpness;
12076 }
12077 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012078 }
12079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12080 rc = BAD_VALUE;
12081 }
12082 }
12083
12084 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12085 int32_t respectFlashMode = 1;
12086 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12087 uint8_t fwk_aeMode =
12088 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012089 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12090 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12091 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012092 respectFlashMode = 0;
12093 LOGH("AE Mode controls flash, ignore android.flash.mode");
12094 }
12095 }
12096 if (respectFlashMode) {
12097 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12098 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12099 LOGH("flash mode after mapping %d", val);
12100 // To check: CAM_INTF_META_FLASH_MODE usage
12101 if (NAME_NOT_FOUND != val) {
12102 uint8_t flashMode = (uint8_t)val;
12103 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12104 rc = BAD_VALUE;
12105 }
12106 }
12107 }
12108 }
12109
12110 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12111 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12112 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12113 rc = BAD_VALUE;
12114 }
12115 }
12116
12117 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12118 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12119 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12120 flashFiringTime)) {
12121 rc = BAD_VALUE;
12122 }
12123 }
12124
12125 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12126 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12128 hotPixelMode)) {
12129 rc = BAD_VALUE;
12130 }
12131 }
12132
12133 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12134 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12136 lensAperture)) {
12137 rc = BAD_VALUE;
12138 }
12139 }
12140
12141 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12142 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12144 filterDensity)) {
12145 rc = BAD_VALUE;
12146 }
12147 }
12148
12149 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12150 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12151 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12152 focalLength)) {
12153 rc = BAD_VALUE;
12154 }
12155 }
12156
12157 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12158 uint8_t optStabMode =
12159 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12160 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12161 optStabMode)) {
12162 rc = BAD_VALUE;
12163 }
12164 }
12165
12166 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12167 uint8_t videoStabMode =
12168 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12169 LOGD("videoStabMode from APP = %d", videoStabMode);
12170 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12171 videoStabMode)) {
12172 rc = BAD_VALUE;
12173 }
12174 }
12175
12176
12177 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12178 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12179 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12180 noiseRedMode)) {
12181 rc = BAD_VALUE;
12182 }
12183 }
12184
12185 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12186 float reprocessEffectiveExposureFactor =
12187 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12188 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12189 reprocessEffectiveExposureFactor)) {
12190 rc = BAD_VALUE;
12191 }
12192 }
12193
12194 cam_crop_region_t scalerCropRegion;
12195 bool scalerCropSet = false;
12196 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12197 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12198 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12199 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12200 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12201
12202 // Map coordinate system from active array to sensor output.
12203 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12204 scalerCropRegion.width, scalerCropRegion.height);
12205
12206 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12207 scalerCropRegion)) {
12208 rc = BAD_VALUE;
12209 }
12210 scalerCropSet = true;
12211 }
12212
12213 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12214 int64_t sensorExpTime =
12215 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12216 LOGD("setting sensorExpTime %lld", sensorExpTime);
12217 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12218 sensorExpTime)) {
12219 rc = BAD_VALUE;
12220 }
12221 }
12222
12223 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12224 int64_t sensorFrameDuration =
12225 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012226 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12227 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12228 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12229 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12231 sensorFrameDuration)) {
12232 rc = BAD_VALUE;
12233 }
12234 }
12235
12236 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12237 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12238 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12239 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12240 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12241 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12242 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12243 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12244 sensorSensitivity)) {
12245 rc = BAD_VALUE;
12246 }
12247 }
12248
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012249#ifndef USE_HAL_3_3
12250 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12251 int32_t ispSensitivity =
12252 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12253 if (ispSensitivity <
12254 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12255 ispSensitivity =
12256 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12257 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12258 }
12259 if (ispSensitivity >
12260 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12261 ispSensitivity =
12262 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12263 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12264 }
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12266 ispSensitivity)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
12270#endif
12271
Thierry Strudel3d639192016-09-09 11:52:26 -070012272 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12273 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278
12279 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12280 uint8_t fwk_facedetectMode =
12281 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12282
12283 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12284 fwk_facedetectMode);
12285
12286 if (NAME_NOT_FOUND != val) {
12287 uint8_t facedetectMode = (uint8_t)val;
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12289 facedetectMode)) {
12290 rc = BAD_VALUE;
12291 }
12292 }
12293 }
12294
Thierry Strudel54dc9782017-02-15 12:12:10 -080012295 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012296 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012297 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012298 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12299 histogramMode)) {
12300 rc = BAD_VALUE;
12301 }
12302 }
12303
12304 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12305 uint8_t sharpnessMapMode =
12306 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12308 sharpnessMapMode)) {
12309 rc = BAD_VALUE;
12310 }
12311 }
12312
12313 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12314 uint8_t tonemapMode =
12315 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12317 rc = BAD_VALUE;
12318 }
12319 }
12320 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12321 /*All tonemap channels will have the same number of points*/
12322 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12323 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12324 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12325 cam_rgb_tonemap_curves tonemapCurves;
12326 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12327 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12328 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12329 tonemapCurves.tonemap_points_cnt,
12330 CAM_MAX_TONEMAP_CURVE_SIZE);
12331 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12332 }
12333
12334 /* ch0 = G*/
12335 size_t point = 0;
12336 cam_tonemap_curve_t tonemapCurveGreen;
12337 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12338 for (size_t j = 0; j < 2; j++) {
12339 tonemapCurveGreen.tonemap_points[i][j] =
12340 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12341 point++;
12342 }
12343 }
12344 tonemapCurves.curves[0] = tonemapCurveGreen;
12345
12346 /* ch 1 = B */
12347 point = 0;
12348 cam_tonemap_curve_t tonemapCurveBlue;
12349 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12350 for (size_t j = 0; j < 2; j++) {
12351 tonemapCurveBlue.tonemap_points[i][j] =
12352 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12353 point++;
12354 }
12355 }
12356 tonemapCurves.curves[1] = tonemapCurveBlue;
12357
12358 /* ch 2 = R */
12359 point = 0;
12360 cam_tonemap_curve_t tonemapCurveRed;
12361 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12362 for (size_t j = 0; j < 2; j++) {
12363 tonemapCurveRed.tonemap_points[i][j] =
12364 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12365 point++;
12366 }
12367 }
12368 tonemapCurves.curves[2] = tonemapCurveRed;
12369
12370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12371 tonemapCurves)) {
12372 rc = BAD_VALUE;
12373 }
12374 }
12375
12376 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12377 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12378 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12379 captureIntent)) {
12380 rc = BAD_VALUE;
12381 }
12382 }
12383
12384 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12385 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12387 blackLevelLock)) {
12388 rc = BAD_VALUE;
12389 }
12390 }
12391
12392 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12393 uint8_t lensShadingMapMode =
12394 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12395 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12396 lensShadingMapMode)) {
12397 rc = BAD_VALUE;
12398 }
12399 }
12400
12401 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12402 cam_area_t roi;
12403 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012404 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012405
12406 // Map coordinate system from active array to sensor output.
12407 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12408 roi.rect.height);
12409
12410 if (scalerCropSet) {
12411 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12412 }
12413 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12414 rc = BAD_VALUE;
12415 }
12416 }
12417
12418 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12419 cam_area_t roi;
12420 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012421 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012422
12423 // Map coordinate system from active array to sensor output.
12424 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12425 roi.rect.height);
12426
12427 if (scalerCropSet) {
12428 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12429 }
12430 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12431 rc = BAD_VALUE;
12432 }
12433 }
12434
12435 // CDS for non-HFR non-video mode
12436 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12437 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12438 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12439 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12440 LOGE("Invalid CDS mode %d!", *fwk_cds);
12441 } else {
12442 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12443 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12444 rc = BAD_VALUE;
12445 }
12446 }
12447 }
12448
Thierry Strudel04e026f2016-10-10 11:27:36 -070012449 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012450 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012451 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012452 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12453 }
12454 if (m_bVideoHdrEnabled)
12455 vhdr = CAM_VIDEO_HDR_MODE_ON;
12456
Thierry Strudel54dc9782017-02-15 12:12:10 -080012457 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12458
12459 if(vhdr != curr_hdr_state)
12460         LOGH("PROFILE_SET_HDR_MODE %d", vhdr);
12461
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012462 rc = setVideoHdrMode(mParameters, vhdr);
12463 if (rc != NO_ERROR) {
12464         LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012465 }
12466
12467 //IR
12468 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12469 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12470 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012471 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12472 uint8_t isIRon = 0;
12473
12474         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012475 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12476 LOGE("Invalid IR mode %d!", fwk_ir);
12477 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012478 if(isIRon != curr_ir_state )
12479             LOGH("PROFILE_SET_IR_MODE %d", isIRon);
12480
Thierry Strudel04e026f2016-10-10 11:27:36 -070012481 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12482 CAM_INTF_META_IR_MODE, fwk_ir)) {
12483 rc = BAD_VALUE;
12484 }
12485 }
12486 }
12487
Thierry Strudel54dc9782017-02-15 12:12:10 -080012488 //Binning Correction Mode
12489 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12490 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12491 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12492 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12493 || (0 > fwk_binning_correction)) {
12494 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12495 } else {
12496 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12497 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12498 rc = BAD_VALUE;
12499 }
12500 }
12501 }
12502
Thierry Strudel269c81a2016-10-12 12:13:59 -070012503 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12504 float aec_speed;
12505 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12506 LOGD("AEC Speed :%f", aec_speed);
12507 if ( aec_speed < 0 ) {
12508             LOGE("Invalid AEC convergence speed %f!", aec_speed);
12509 } else {
12510 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12511 aec_speed)) {
12512 rc = BAD_VALUE;
12513 }
12514 }
12515 }
12516
12517 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12518 float awb_speed;
12519 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12520 LOGD("AWB Speed :%f", awb_speed);
12521 if ( awb_speed < 0 ) {
12522             LOGE("Invalid AWB convergence speed %f!", awb_speed);
12523 } else {
12524 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12525 awb_speed)) {
12526 rc = BAD_VALUE;
12527 }
12528 }
12529 }
12530
Thierry Strudel3d639192016-09-09 11:52:26 -070012531 // TNR
12532 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12533 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12534 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012535 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012536 cam_denoise_param_t tnr;
12537 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12538 tnr.process_plates =
12539 (cam_denoise_process_type_t)frame_settings.find(
12540 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12541 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012542
12543 if(b_TnrRequested != curr_tnr_state)
12544             LOGH("PROFILE_SET_TNR_MODE %d", b_TnrRequested);
12545
Thierry Strudel3d639192016-09-09 11:52:26 -070012546 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12547 rc = BAD_VALUE;
12548 }
12549 }
12550
Thierry Strudel54dc9782017-02-15 12:12:10 -080012551 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012552 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012553 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012554 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12555 *exposure_metering_mode)) {
12556 rc = BAD_VALUE;
12557 }
12558 }
12559
Thierry Strudel3d639192016-09-09 11:52:26 -070012560 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12561 int32_t fwk_testPatternMode =
12562 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12563 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12564 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12565
12566 if (NAME_NOT_FOUND != testPatternMode) {
12567 cam_test_pattern_data_t testPatternData;
12568 memset(&testPatternData, 0, sizeof(testPatternData));
12569 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12570 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12571 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12572 int32_t *fwk_testPatternData =
12573 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12574 testPatternData.r = fwk_testPatternData[0];
12575 testPatternData.b = fwk_testPatternData[3];
12576 switch (gCamCapability[mCameraId]->color_arrangement) {
12577 case CAM_FILTER_ARRANGEMENT_RGGB:
12578 case CAM_FILTER_ARRANGEMENT_GRBG:
12579 testPatternData.gr = fwk_testPatternData[1];
12580 testPatternData.gb = fwk_testPatternData[2];
12581 break;
12582 case CAM_FILTER_ARRANGEMENT_GBRG:
12583 case CAM_FILTER_ARRANGEMENT_BGGR:
12584 testPatternData.gr = fwk_testPatternData[2];
12585 testPatternData.gb = fwk_testPatternData[1];
12586 break;
12587 default:
12588 LOGE("color arrangement %d is not supported",
12589 gCamCapability[mCameraId]->color_arrangement);
12590 break;
12591 }
12592 }
12593 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12594 testPatternData)) {
12595 rc = BAD_VALUE;
12596 }
12597 } else {
12598 LOGE("Invalid framework sensor test pattern mode %d",
12599 fwk_testPatternMode);
12600 }
12601 }
12602
12603 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12604 size_t count = 0;
12605 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12606 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12607 gps_coords.data.d, gps_coords.count, count);
12608 if (gps_coords.count != count) {
12609 rc = BAD_VALUE;
12610 }
12611 }
12612
12613 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12614 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12615 size_t count = 0;
12616 const char *gps_methods_src = (const char *)
12617 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12618 memset(gps_methods, '\0', sizeof(gps_methods));
12619 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12620 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12621 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12622 if (GPS_PROCESSING_METHOD_SIZE != count) {
12623 rc = BAD_VALUE;
12624 }
12625 }
12626
12627 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12628 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12629 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12630 gps_timestamp)) {
12631 rc = BAD_VALUE;
12632 }
12633 }
12634
12635 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12636 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12637 cam_rotation_info_t rotation_info;
12638 if (orientation == 0) {
12639 rotation_info.rotation = ROTATE_0;
12640 } else if (orientation == 90) {
12641 rotation_info.rotation = ROTATE_90;
12642 } else if (orientation == 180) {
12643 rotation_info.rotation = ROTATE_180;
12644 } else if (orientation == 270) {
12645 rotation_info.rotation = ROTATE_270;
12646 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012647 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012648 rotation_info.streamId = snapshotStreamId;
12649 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12651 rc = BAD_VALUE;
12652 }
12653 }
12654
12655 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12656 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12657 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12658 rc = BAD_VALUE;
12659 }
12660 }
12661
12662 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12663 uint32_t thumb_quality = (uint32_t)
12664 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12665 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12666 thumb_quality)) {
12667 rc = BAD_VALUE;
12668 }
12669 }
12670
12671 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12672 cam_dimension_t dim;
12673 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12674 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12675 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12676 rc = BAD_VALUE;
12677 }
12678 }
12679
12680 // Internal metadata
12681 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12682 size_t count = 0;
12683 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12684 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12685 privatedata.data.i32, privatedata.count, count);
12686 if (privatedata.count != count) {
12687 rc = BAD_VALUE;
12688 }
12689 }
12690
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012691 // ISO/Exposure Priority
12692 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12693 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12694 cam_priority_mode_t mode =
12695 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12696 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12697 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12698 use_iso_exp_pty.previewOnly = FALSE;
12699 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12700 use_iso_exp_pty.value = *ptr;
12701
12702 if(CAM_ISO_PRIORITY == mode) {
12703 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12704 use_iso_exp_pty)) {
12705 rc = BAD_VALUE;
12706 }
12707 }
12708 else {
12709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12710 use_iso_exp_pty)) {
12711 rc = BAD_VALUE;
12712 }
12713 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012714
12715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12716 rc = BAD_VALUE;
12717 }
12718 }
12719 } else {
12720 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12721 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012722 }
12723 }
12724
12725 // Saturation
12726 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12727 int32_t* use_saturation =
12728 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12729 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12730 rc = BAD_VALUE;
12731 }
12732 }
12733
Thierry Strudel3d639192016-09-09 11:52:26 -070012734 // EV step
12735 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12736 gCamCapability[mCameraId]->exp_compensation_step)) {
12737 rc = BAD_VALUE;
12738 }
12739
12740 // CDS info
12741 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12742 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12743 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12744
12745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12746 CAM_INTF_META_CDS_DATA, *cdsData)) {
12747 rc = BAD_VALUE;
12748 }
12749 }
12750
Shuzhen Wang19463d72016-03-08 11:09:52 -080012751 // Hybrid AE
12752 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12753 uint8_t *hybrid_ae = (uint8_t *)
12754 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12755
12756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12757 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12758 rc = BAD_VALUE;
12759 }
12760 }
12761
Shuzhen Wang14415f52016-11-16 18:26:18 -080012762 // Histogram
12763 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12764 uint8_t histogramMode =
12765 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12767 histogramMode)) {
12768 rc = BAD_VALUE;
12769 }
12770 }
12771
12772 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12773 int32_t histogramBins =
12774 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12776 histogramBins)) {
12777 rc = BAD_VALUE;
12778 }
12779 }
12780
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012781 // Tracking AF
12782 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12783 uint8_t trackingAfTrigger =
12784 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12785 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12786 trackingAfTrigger)) {
12787 rc = BAD_VALUE;
12788 }
12789 }
12790
Thierry Strudel3d639192016-09-09 11:52:26 -070012791 return rc;
12792}
12793
12794/*===========================================================================
12795 * FUNCTION : captureResultCb
12796 *
12797 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12798 *
12799 * PARAMETERS :
12800 * @frame : frame information from mm-camera-interface
12801 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12802 * @userdata: userdata
12803 *
12804 * RETURN : NONE
12805 *==========================================================================*/
12806void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12807 camera3_stream_buffer_t *buffer,
12808 uint32_t frame_number, bool isInputBuffer, void *userdata)
12809{
12810 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12811 if (hw == NULL) {
12812 LOGE("Invalid hw %p", hw);
12813 return;
12814 }
12815
12816 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12817 return;
12818}
12819
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012820/*===========================================================================
12821 * FUNCTION : setBufferErrorStatus
12822 *
12823 * DESCRIPTION: Callback handler for channels to report any buffer errors
12824 *
12825 * PARAMETERS :
12826 * @ch : Channel on which buffer error is reported from
12827 * @frame_number : frame number on which buffer error is reported on
12828 * @buffer_status : buffer error status
12829 * @userdata: userdata
12830 *
12831 * RETURN : NONE
12832 *==========================================================================*/
12833void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12834 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12835{
12836 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12837 if (hw == NULL) {
12838 LOGE("Invalid hw %p", hw);
12839 return;
12840 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012841
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012842 hw->setBufferErrorStatus(ch, frame_number, err);
12843 return;
12844}
12845
12846void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12847 uint32_t frameNumber, camera3_buffer_status_t err)
12848{
12849 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12850 pthread_mutex_lock(&mMutex);
12851
12852 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12853 if (req.frame_number != frameNumber)
12854 continue;
12855 for (auto& k : req.mPendingBufferList) {
12856 if(k.stream->priv == ch) {
12857 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12858 }
12859 }
12860 }
12861
12862 pthread_mutex_unlock(&mMutex);
12863 return;
12864}
Thierry Strudel3d639192016-09-09 11:52:26 -070012865/*===========================================================================
12866 * FUNCTION : initialize
12867 *
12868 * DESCRIPTION: Pass framework callback pointers to HAL
12869 *
12870 * PARAMETERS :
12871 *
12872 *
12873 * RETURN : Success : 0
12874 * Failure: -ENODEV
12875 *==========================================================================*/
12876
12877int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12878 const camera3_callback_ops_t *callback_ops)
12879{
12880 LOGD("E");
12881 QCamera3HardwareInterface *hw =
12882 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12883 if (!hw) {
12884 LOGE("NULL camera device");
12885 return -ENODEV;
12886 }
12887
12888 int rc = hw->initialize(callback_ops);
12889 LOGD("X");
12890 return rc;
12891}
12892
12893/*===========================================================================
12894 * FUNCTION : configure_streams
12895 *
12896 * DESCRIPTION: Validate the camera device handle and forward the stream list to configureStreams()
12897 *
12898 * PARAMETERS :
12899 *
12900 *
12901 * RETURN : Success: 0
12902 * Failure: -EINVAL (if stream configuration is invalid)
12903 * -ENODEV (fatal error)
12904 *==========================================================================*/
12905
12906int QCamera3HardwareInterface::configure_streams(
12907 const struct camera3_device *device,
12908 camera3_stream_configuration_t *stream_list)
12909{
12910 LOGD("E");
12911 QCamera3HardwareInterface *hw =
12912 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12913 if (!hw) {
12914 LOGE("NULL camera device");
12915 return -ENODEV;
12916 }
12917 int rc = hw->configureStreams(stream_list);
12918 LOGD("X");
12919 return rc;
12920}
12921
12922/*===========================================================================
12923 * FUNCTION : construct_default_request_settings
12924 *
12925 * DESCRIPTION: Configure a settings buffer to meet the required use case
12926 *
12927 * PARAMETERS :
12928 *
12929 *
12930 * RETURN : Success: Return valid metadata
12931 * Failure: Return NULL
12932 *==========================================================================*/
12933const camera_metadata_t* QCamera3HardwareInterface::
12934 construct_default_request_settings(const struct camera3_device *device,
12935 int type)
12936{
12937
12938 LOGD("E");
12939 camera_metadata_t* fwk_metadata = NULL;
12940 QCamera3HardwareInterface *hw =
12941 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12942 if (!hw) {
12943 LOGE("NULL camera device");
12944 return NULL;
12945 }
12946
12947 fwk_metadata = hw->translateCapabilityToMetadata(type);
12948
12949 LOGD("X");
12950 return fwk_metadata;
12951}
12952
12953/*===========================================================================
12954 * FUNCTION : process_capture_request
12955 *
12956 * DESCRIPTION: Validate the camera device handle and forward the capture request to orchestrateRequest()
12957 *
12958 * PARAMETERS :
12959 *
12960 *
12961 * RETURN :
12962 *==========================================================================*/
12963int QCamera3HardwareInterface::process_capture_request(
12964 const struct camera3_device *device,
12965 camera3_capture_request_t *request)
12966{
12967 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012968 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012969 QCamera3HardwareInterface *hw =
12970 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12971 if (!hw) {
12972 LOGE("NULL camera device");
12973 return -EINVAL;
12974 }
12975
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012976 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012977 LOGD("X");
12978 return rc;
12979}
12980
12981/*===========================================================================
12982 * FUNCTION : dump
12983 *
12984 * DESCRIPTION: Dump HAL state for the camera device to the given file descriptor
12985 *
12986 * PARAMETERS :
12987 *
12988 *
12989 * RETURN :
12990 *==========================================================================*/
12991
12992void QCamera3HardwareInterface::dump(
12993 const struct camera3_device *device, int fd)
12994{
12995 /* Log level property is read when "adb shell dumpsys media.camera" is
12996 called so that the log level can be controlled without restarting
12997 the media server */
12998 getLogLevel();
12999
13000 LOGD("E");
13001 QCamera3HardwareInterface *hw =
13002 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13003 if (!hw) {
13004 LOGE("NULL camera device");
13005 return;
13006 }
13007
13008 hw->dump(fd);
13009 LOGD("X");
13010 return;
13011}
13012
13013/*===========================================================================
13014 * FUNCTION : flush
13015 *
13016 * DESCRIPTION: Flush in-flight requests for the camera device after validating its state
13017 *
13018 * PARAMETERS :
13019 *
13020 *
13021 * RETURN :
13022 *==========================================================================*/
13023
13024int QCamera3HardwareInterface::flush(
13025 const struct camera3_device *device)
13026{
13027 int rc;
13028 LOGD("E");
13029 QCamera3HardwareInterface *hw =
13030 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13031 if (!hw) {
13032 LOGE("NULL camera device");
13033 return -EINVAL;
13034 }
13035
13036 pthread_mutex_lock(&hw->mMutex);
13037 // Validate current state
13038 switch (hw->mState) {
13039 case STARTED:
13040 /* valid state */
13041 break;
13042
13043 case ERROR:
13044 pthread_mutex_unlock(&hw->mMutex);
13045 hw->handleCameraDeviceError();
13046 return -ENODEV;
13047
13048 default:
13049 LOGI("Flush returned during state %d", hw->mState);
13050 pthread_mutex_unlock(&hw->mMutex);
13051 return 0;
13052 }
13053 pthread_mutex_unlock(&hw->mMutex);
13054
13055 rc = hw->flush(true /* restart channels */ );
13056 LOGD("X");
13057 return rc;
13058}
13059
13060/*===========================================================================
13061 * FUNCTION : close_camera_device
13062 *
13063 * DESCRIPTION: Close the camera device and release all associated resources
13064 *
13065 * PARAMETERS :
13066 *   @device : hw_device_t handle of the camera device to close
13067 *
13068 * RETURN     : 0 on success, non-zero error code on failure
13069 *==========================================================================*/
13070int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13071{
13072 int ret = NO_ERROR;
13073 QCamera3HardwareInterface *hw =
13074 reinterpret_cast<QCamera3HardwareInterface *>(
13075 reinterpret_cast<camera3_device_t *>(device)->priv);
13076 if (!hw) {
13077 LOGE("NULL camera device");
13078 return BAD_VALUE;
13079 }
13080
13081 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13082 delete hw;
13083 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013084 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013085 return ret;
13086}
13087
13088/*===========================================================================
13089 * FUNCTION : getWaveletDenoiseProcessPlate
13090 *
13091 * DESCRIPTION: query wavelet denoise process plate
13092 *
13093 * PARAMETERS : None
13094 *
13095 * RETURN     : WNR process plate value
13096 *==========================================================================*/
13097cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13098{
13099 char prop[PROPERTY_VALUE_MAX];
13100 memset(prop, 0, sizeof(prop));
13101 property_get("persist.denoise.process.plates", prop, "0");
13102 int processPlate = atoi(prop);
13103 switch(processPlate) {
13104 case 0:
13105 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13106 case 1:
13107 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13108 case 2:
13109 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13110 case 3:
13111 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13112 default:
13113 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13114 }
13115}
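
/* Usage sketch (assumes adb access on a debuggable build): the WNR plate
 * selected above can be overridden at runtime before the camera is opened, e.g.
 *   adb shell setprop persist.denoise.process.plates 2   # streamlined YCbCr
 * Values outside 0-3 fall back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.
 */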
13116
13117
13118/*===========================================================================
13119 * FUNCTION : getTemporalDenoiseProcessPlate
13120 *
13121 * DESCRIPTION: query temporal denoise process plate
13122 *
13123 * PARAMETERS : None
13124 *
13125 * RETURN     : TNR process plate value
13126 *==========================================================================*/
13127cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13128{
13129 char prop[PROPERTY_VALUE_MAX];
13130 memset(prop, 0, sizeof(prop));
13131 property_get("persist.tnr.process.plates", prop, "0");
13132 int processPlate = atoi(prop);
13133 switch(processPlate) {
13134 case 0:
13135 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13136 case 1:
13137 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13138 case 2:
13139 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13140 case 3:
13141 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13142 default:
13143 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13144 }
13145}
13146
13147
13148/*===========================================================================
13149 * FUNCTION : extractSceneMode
13150 *
13151 * DESCRIPTION: Extract scene mode from framework-set metadata
13152 *
13153 * PARAMETERS :
13154 * @frame_settings: CameraMetadata reference
13155 *      @metaMode: ANDROID_CONTROL_MODE value set by the framework
13156 * @hal_metadata: hal metadata structure
13157 *
13158 * RETURN     : NO_ERROR on success, error code on failure
13159 *==========================================================================*/
13160int32_t QCamera3HardwareInterface::extractSceneMode(
13161 const CameraMetadata &frame_settings, uint8_t metaMode,
13162 metadata_buffer_t *hal_metadata)
13163{
13164 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013165 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13166
13167 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13168 LOGD("Ignoring control mode OFF_KEEP_STATE");
13169 return NO_ERROR;
13170 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013171
13172 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13173 camera_metadata_ro_entry entry =
13174 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13175 if (0 == entry.count)
13176 return rc;
13177
13178 uint8_t fwk_sceneMode = entry.data.u8[0];
13179
13180 int val = lookupHalName(SCENE_MODES_MAP,
13181 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13182 fwk_sceneMode);
13183 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013184 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013185 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013186 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013187 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013188
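    // Sensor HDR is engaged (or torn down) whenever the HDR scene mode toggles;
    // the multi-frame bracketing and bestshot parameters below are only sent
    // while sensor HDR stays disabled.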
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013189 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13190 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13191 }
13192
13193 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13194 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013195 cam_hdr_param_t hdr_params;
13196 hdr_params.hdr_enable = 1;
13197 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13198 hdr_params.hdr_need_1x = false;
13199 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13200 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13201 rc = BAD_VALUE;
13202 }
13203 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013204
Thierry Strudel3d639192016-09-09 11:52:26 -070013205 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13206 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13207 rc = BAD_VALUE;
13208 }
13209 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013210
13211 if (mForceHdrSnapshot) {
13212 cam_hdr_param_t hdr_params;
13213 hdr_params.hdr_enable = 1;
13214 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13215 hdr_params.hdr_need_1x = false;
13216 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13217 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13218 rc = BAD_VALUE;
13219 }
13220 }
13221
Thierry Strudel3d639192016-09-09 11:52:26 -070013222 return rc;
13223}
13224
13225/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013226 * FUNCTION : setVideoHdrMode
13227 *
13228 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13229 *
13230 * PARAMETERS :
13231 * @hal_metadata: hal metadata structure
13232 *      @vhdr: QCAMERA3_VIDEO_HDR_MODE value requested by the framework
13233 *
13234 * RETURN     : NO_ERROR on success, error code on failure
13235 *==========================================================================*/
13236int32_t QCamera3HardwareInterface::setVideoHdrMode(
13237 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13238{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013239 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13240 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13241 }
13242
13243 LOGE("Invalid Video HDR mode %d!", vhdr);
13244 return BAD_VALUE;
13245}
13246
13247/*===========================================================================
13248 * FUNCTION : setSensorHDR
13249 *
13250 * DESCRIPTION: Enable/disable sensor HDR.
13251 *
13252 * PARAMETERS :
13253 * @hal_metadata: hal metadata structure
13254 *      @enable: whether to enable or disable sensor HDR
 *      @isVideoHdrEnable: true when invoked from the video HDR path
13255 *
13256 * RETURN     : NO_ERROR on success, error code on failure
13257 *==========================================================================*/
13258int32_t QCamera3HardwareInterface::setSensorHDR(
13259 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13260{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013261 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013262 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13263
13264 if (enable) {
13265 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13266 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13267 #ifdef _LE_CAMERA_
13268 //Default to staggered HDR for IOT
13269 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13270 #else
13271 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13272 #endif
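        // The property value is interpreted directly as a cam_sensor_hdr_type_t;
        // "3" corresponds to CAM_SENSOR_HDR_STAGGERED (hence the IoT default
        // above), and modes the hardware cannot support are rejected in the
        // switch below.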
13273 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13274 }
13275
13276 bool isSupported = false;
13277 switch (sensor_hdr) {
13278 case CAM_SENSOR_HDR_IN_SENSOR:
13279 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13280 CAM_QCOM_FEATURE_SENSOR_HDR) {
13281 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013282 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013283 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013284 break;
13285 case CAM_SENSOR_HDR_ZIGZAG:
13286 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13287 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13288 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013289 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013290 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013291 break;
13292 case CAM_SENSOR_HDR_STAGGERED:
13293 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13294 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13295 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013296 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013297 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013298 break;
13299 case CAM_SENSOR_HDR_OFF:
13300 isSupported = true;
13301 LOGD("Turning off sensor HDR");
13302 break;
13303 default:
13304 LOGE("HDR mode %d not supported", sensor_hdr);
13305 rc = BAD_VALUE;
13306 break;
13307 }
13308
13309 if(isSupported) {
13310 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13311 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13312 rc = BAD_VALUE;
13313 } else {
13314 if(!isVideoHdrEnable)
13315 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013316 }
13317 }
13318 return rc;
13319}
13320
13321/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013322 * FUNCTION : needRotationReprocess
13323 *
13324 * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
13325 *
13326 * PARAMETERS : none
13327 *
13328 * RETURN : true: needed
13329 * false: no need
13330 *==========================================================================*/
13331bool QCamera3HardwareInterface::needRotationReprocess()
13332{
13333 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13334 // current rotation is not zero, and pp has the capability to process rotation
13335 LOGH("need do reprocess for rotation");
13336 return true;
13337 }
13338
13339 return false;
13340}
13341
13342/*===========================================================================
13343 * FUNCTION : needReprocess
13344 *
13345 * DESCRIPTION: check whether reprocess is needed
13346 *
13347 * PARAMETERS : none
13348 *
13349 * RETURN : true: needed
13350 * false: no need
13351 *==========================================================================*/
13352bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13353{
13354 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13355 // TODO: add for ZSL HDR later
13356 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13357 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13358 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13359 return true;
13360 } else {
13361 LOGH("already post processed frame");
13362 return false;
13363 }
13364 }
13365 return needRotationReprocess();
13366}
13367
13368/*===========================================================================
13369 * FUNCTION : needJpegExifRotation
13370 *
13371 * DESCRIPTION: check whether JPEG EXIF rotation is needed
13372 *
13373 * PARAMETERS : none
13374 *
13375 * RETURN : true: needed
13376 * false: no need
13377 *==========================================================================*/
13378bool QCamera3HardwareInterface::needJpegExifRotation()
13379{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013380 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013381 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13382 LOGD("Need use Jpeg EXIF Rotation");
13383 return true;
13384 }
13385 return false;
13386}
13387
13388/*===========================================================================
13389 * FUNCTION : addOfflineReprocChannel
13390 *
13391 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13392 * coming from input channel
13393 *
13394 * PARAMETERS :
13395 * @config : reprocess configuration
13396 * @inputChHandle : pointer to the input (source) channel
13397 *
13398 *
13399 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13400 *==========================================================================*/
13401QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13402 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13403{
13404 int32_t rc = NO_ERROR;
13405 QCamera3ReprocessChannel *pChannel = NULL;
13406
13407 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013408 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13409 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013410 if (NULL == pChannel) {
13411 LOGE("no mem for reprocess channel");
13412 return NULL;
13413 }
13414
13415 rc = pChannel->initialize(IS_TYPE_NONE);
13416 if (rc != NO_ERROR) {
13417 LOGE("init reprocess channel failed, ret = %d", rc);
13418 delete pChannel;
13419 return NULL;
13420 }
13421
13422 // pp feature config
13423 cam_pp_feature_config_t pp_config;
13424 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13425
13426 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13427 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13428 & CAM_QCOM_FEATURE_DSDN) {
13429        //Use CPP DSDN instead of CDS when the hardware supports it.
13430 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13431 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13432 }
13433 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13434 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13435 }
13436
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013437 if (config.hdr_param.hdr_enable) {
13438 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13439 pp_config.hdr_param = config.hdr_param;
13440 }
13441
13442 if (mForceHdrSnapshot) {
13443 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13444 pp_config.hdr_param.hdr_enable = 1;
13445 pp_config.hdr_param.hdr_need_1x = 0;
13446 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13447 }
13448
Thierry Strudel3d639192016-09-09 11:52:26 -070013449 rc = pChannel->addReprocStreamsFromSource(pp_config,
13450 config,
13451 IS_TYPE_NONE,
13452 mMetadataChannel);
13453
13454 if (rc != NO_ERROR) {
13455 delete pChannel;
13456 return NULL;
13457 }
13458 return pChannel;
13459}
13460
13461/*===========================================================================
13462 * FUNCTION : getMobicatMask
13463 *
13464 * DESCRIPTION: returns mobicat mask
13465 *
13466 * PARAMETERS : none
13467 *
13468 * RETURN : mobicat mask
13469 *
13470 *==========================================================================*/
13471uint8_t QCamera3HardwareInterface::getMobicatMask()
13472{
13473 return m_MobicatMask;
13474}
13475
13476/*===========================================================================
13477 * FUNCTION : setMobicat
13478 *
13479 * DESCRIPTION: set Mobicat on/off.
13480 *
13481 * PARAMETERS :
13482 *   None
13483 *
13484 * RETURN : int32_t type of status
13485 * NO_ERROR -- success
13486 *              non-zero failure code
13487 *==========================================================================*/
13488int32_t QCamera3HardwareInterface::setMobicat()
13489{
13490 char value [PROPERTY_VALUE_MAX];
13491 property_get("persist.camera.mobicat", value, "0");
13492 int32_t ret = NO_ERROR;
13493 uint8_t enableMobi = (uint8_t)atoi(value);
13494
13495 if (enableMobi) {
13496 tune_cmd_t tune_cmd;
13497 tune_cmd.type = SET_RELOAD_CHROMATIX;
13498 tune_cmd.module = MODULE_ALL;
13499 tune_cmd.value = TRUE;
13500 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13501 CAM_INTF_PARM_SET_VFE_COMMAND,
13502 tune_cmd);
13503
13504 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13505 CAM_INTF_PARM_SET_PP_COMMAND,
13506 tune_cmd);
13507 }
13508 m_MobicatMask = enableMobi;
13509
13510 return ret;
13511}
13512
13513/*===========================================================================
13514* FUNCTION : getLogLevel
13515*
13516* DESCRIPTION: Reads the log level property into a variable
13517*
13518* PARAMETERS :
13519* None
13520*
13521* RETURN :
13522* None
13523*==========================================================================*/
13524void QCamera3HardwareInterface::getLogLevel()
13525{
13526 char prop[PROPERTY_VALUE_MAX];
13527 uint32_t globalLogLevel = 0;
13528
13529 property_get("persist.camera.hal.debug", prop, "0");
13530 int val = atoi(prop);
13531 if (0 <= val) {
13532 gCamHal3LogLevel = (uint32_t)val;
13533 }
13534
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013535 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013536 gKpiDebugLevel = atoi(prop);
13537
13538 property_get("persist.camera.global.debug", prop, "0");
13539 val = atoi(prop);
13540 if (0 <= val) {
13541 globalLogLevel = (uint32_t)val;
13542 }
13543
13544    /* Highest log level among persist.camera.hal.debug and persist.camera.global.debug is selected */
13545 if (gCamHal3LogLevel < globalLogLevel)
13546 gCamHal3LogLevel = globalLogLevel;
13547
13548 return;
13549}
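
/* Usage sketch (assumes adb access on a debuggable build): HAL verbosity can be
 * raised at runtime without restarting the media server, e.g.
 *   adb shell setprop persist.camera.hal.debug 4
 *   adb shell dumpsys media.camera    # dump() re-reads the property
 * The effective level is the higher of persist.camera.hal.debug and
 * persist.camera.global.debug.
 */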
13550
13551/*===========================================================================
13552 * FUNCTION : validateStreamRotations
13553 *
13554 * DESCRIPTION: Check if the rotations requested are supported
13555 *
13556 * PARAMETERS :
13557 * @stream_list : streams to be configured
13558 *
13559 * RETURN : NO_ERROR on success
13560 * -EINVAL on failure
13561 *
13562 *==========================================================================*/
13563int QCamera3HardwareInterface::validateStreamRotations(
13564 camera3_stream_configuration_t *streamList)
13565{
13566 int rc = NO_ERROR;
13567
13568 /*
13569 * Loop through all streams requested in configuration
13570 * Check if unsupported rotations have been requested on any of them
13571 */
13572 for (size_t j = 0; j < streamList->num_streams; j++){
13573 camera3_stream_t *newStream = streamList->streams[j];
13574
13575 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13576 bool isImplDef = (newStream->format ==
13577 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13578 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13579 isImplDef);
13580
13581 if (isRotated && (!isImplDef || isZsl)) {
13582 LOGE("Error: Unsupported rotation of %d requested for stream"
13583                    " type:%d and stream format:%d",
13584 newStream->rotation, newStream->stream_type,
13585 newStream->format);
13586 rc = -EINVAL;
13587 break;
13588 }
13589 }
13590
13591 return rc;
13592}
13593
13594/*===========================================================================
13595* FUNCTION : getFlashInfo
13596*
13597* DESCRIPTION: Retrieve information about whether the device has a flash.
13598*
13599* PARAMETERS :
13600* @cameraId : Camera id to query
13601* @hasFlash : Boolean indicating whether there is a flash device
13602* associated with given camera
13603* @flashNode : If a flash device exists, this will be its device node.
13604*
13605* RETURN :
13606* None
13607*==========================================================================*/
13608void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13609 bool& hasFlash,
13610 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13611{
13612 cam_capability_t* camCapability = gCamCapability[cameraId];
13613 if (NULL == camCapability) {
13614 hasFlash = false;
13615 flashNode[0] = '\0';
13616 } else {
13617 hasFlash = camCapability->flash_available;
13618 strlcpy(flashNode,
13619 (char*)camCapability->flash_dev_name,
13620 QCAMERA_MAX_FILEPATH_LENGTH);
13621 }
13622}
13623
13624/*===========================================================================
13625* FUNCTION : getEepromVersionInfo
13626*
13627* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13628*
13629* PARAMETERS : None
13630*
13631* RETURN : string describing EEPROM version
13632* "\0" if no such info available
13633*==========================================================================*/
13634const char *QCamera3HardwareInterface::getEepromVersionInfo()
13635{
13636 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13637}
13638
13639/*===========================================================================
13640* FUNCTION : getLdafCalib
13641*
13642* DESCRIPTION: Retrieve Laser AF calibration data
13643*
13644* PARAMETERS : None
13645*
13646* RETURN : Two uint32_t describing laser AF calibration data
13647* NULL if none is available.
13648*==========================================================================*/
13649const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13650{
13651 if (mLdafCalibExist) {
13652 return &mLdafCalib[0];
13653 } else {
13654 return NULL;
13655 }
13656}
13657
13658/*===========================================================================
13659 * FUNCTION : dynamicUpdateMetaStreamInfo
13660 *
13661 * DESCRIPTION: This function:
13662 * (1) stops all the channels
13663 * (2) returns error on pending requests and buffers
13664 * (3) sends metastream_info in setparams
13665 * (4) starts all channels
13666 * This is useful when sensor has to be restarted to apply any
13667 * settings such as frame rate from a different sensor mode
13668 *
13669 * PARAMETERS : None
13670 *
13671 * RETURN : NO_ERROR on success
13672 * Error codes on failure
13673 *
13674 *==========================================================================*/
13675int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13676{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013677 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013678 int rc = NO_ERROR;
13679
13680 LOGD("E");
13681
13682 rc = stopAllChannels();
13683 if (rc < 0) {
13684 LOGE("stopAllChannels failed");
13685 return rc;
13686 }
13687
13688 rc = notifyErrorForPendingRequests();
13689 if (rc < 0) {
13690 LOGE("notifyErrorForPendingRequests failed");
13691 return rc;
13692 }
13693
13694 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13695 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13696                ", Format:%d",
13697 mStreamConfigInfo.type[i],
13698 mStreamConfigInfo.stream_sizes[i].width,
13699 mStreamConfigInfo.stream_sizes[i].height,
13700 mStreamConfigInfo.postprocess_mask[i],
13701 mStreamConfigInfo.format[i]);
13702 }
13703
13704 /* Send meta stream info once again so that ISP can start */
13705 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13706 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13707 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13708 mParameters);
13709 if (rc < 0) {
13710 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13711 }
13712
13713 rc = startAllChannels();
13714 if (rc < 0) {
13715 LOGE("startAllChannels failed");
13716 return rc;
13717 }
13718
13719 LOGD("X");
13720 return rc;
13721}
13722
13723/*===========================================================================
13724 * FUNCTION : stopAllChannels
13725 *
13726 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13727 *
13728 * PARAMETERS : None
13729 *
13730 * RETURN : NO_ERROR on success
13731 * Error codes on failure
13732 *
13733 *==========================================================================*/
13734int32_t QCamera3HardwareInterface::stopAllChannels()
13735{
13736 int32_t rc = NO_ERROR;
13737
13738 LOGD("Stopping all channels");
13739 // Stop the Streams/Channels
13740 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13741 it != mStreamInfo.end(); it++) {
13742 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13743 if (channel) {
13744 channel->stop();
13745 }
13746 (*it)->status = INVALID;
13747 }
13748
13749 if (mSupportChannel) {
13750 mSupportChannel->stop();
13751 }
13752 if (mAnalysisChannel) {
13753 mAnalysisChannel->stop();
13754 }
13755 if (mRawDumpChannel) {
13756 mRawDumpChannel->stop();
13757 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013758 if (mHdrPlusRawSrcChannel) {
13759 mHdrPlusRawSrcChannel->stop();
13760 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013761 if (mMetadataChannel) {
13762        /* If mStreamInfo is not empty, there is a metadata stream */
13763 mMetadataChannel->stop();
13764 }
13765
13766 LOGD("All channels stopped");
13767 return rc;
13768}
13769
13770/*===========================================================================
13771 * FUNCTION : startAllChannels
13772 *
13773 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13774 *
13775 * PARAMETERS : None
13776 *
13777 * RETURN : NO_ERROR on success
13778 * Error codes on failure
13779 *
13780 *==========================================================================*/
13781int32_t QCamera3HardwareInterface::startAllChannels()
13782{
13783 int32_t rc = NO_ERROR;
13784
13785 LOGD("Start all channels ");
13786 // Start the Streams/Channels
13787 if (mMetadataChannel) {
13788        /* If mStreamInfo is not empty, there is a metadata stream */
13789 rc = mMetadataChannel->start();
13790 if (rc < 0) {
13791 LOGE("META channel start failed");
13792 return rc;
13793 }
13794 }
13795 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13796 it != mStreamInfo.end(); it++) {
13797 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13798 if (channel) {
13799 rc = channel->start();
13800 if (rc < 0) {
13801 LOGE("channel start failed");
13802 return rc;
13803 }
13804 }
13805 }
13806 if (mAnalysisChannel) {
13807 mAnalysisChannel->start();
13808 }
13809 if (mSupportChannel) {
13810 rc = mSupportChannel->start();
13811 if (rc < 0) {
13812 LOGE("Support channel start failed");
13813 return rc;
13814 }
13815 }
13816 if (mRawDumpChannel) {
13817 rc = mRawDumpChannel->start();
13818 if (rc < 0) {
13819 LOGE("RAW dump channel start failed");
13820 return rc;
13821 }
13822 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013823 if (mHdrPlusRawSrcChannel) {
13824 rc = mHdrPlusRawSrcChannel->start();
13825 if (rc < 0) {
13826 LOGE("HDR+ RAW channel start failed");
13827 return rc;
13828 }
13829 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013830
13831 LOGD("All channels started");
13832 return rc;
13833}
13834
13835/*===========================================================================
13836 * FUNCTION : notifyErrorForPendingRequests
13837 *
13838 * DESCRIPTION: This function sends error for all the pending requests/buffers
13839 *
13840 * PARAMETERS : None
13841 *
13842 * RETURN : Error codes
13843 * NO_ERROR on success
13844 *
13845 *==========================================================================*/
13846int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13847{
Emilian Peev7650c122017-01-19 08:24:33 -080013848 notifyErrorFoPendingDepthData(mDepthChannel);
13849
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013850 auto pendingRequest = mPendingRequestsList.begin();
13851 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013852
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013853 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13854 // buffers (for which buffers aren't sent yet).
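    // Three cases are handled per frame number: buffers-only pending (result
    // metadata already sent) gets ERROR_BUFFER per buffer, result-only pending
    // (buffers already returned) gets ERROR_RESULT, and fully pending requests
    // get ERROR_REQUEST plus buffer errors.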
13855 while (pendingRequest != mPendingRequestsList.end() ||
13856 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13857 if (pendingRequest == mPendingRequestsList.end() ||
13858 pendingBuffer->frame_number < pendingRequest->frame_number) {
13859            // If metadata for this frame was sent, notify about a buffer error and return buffers
13860 // with error.
13861 for (auto &info : pendingBuffer->mPendingBufferList) {
13862 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013863 camera3_notify_msg_t notify_msg;
13864 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13865 notify_msg.type = CAMERA3_MSG_ERROR;
13866 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013867 notify_msg.message.error.error_stream = info.stream;
13868 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013869 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013870
13871 camera3_stream_buffer_t buffer = {};
13872 buffer.acquire_fence = -1;
13873 buffer.release_fence = -1;
13874 buffer.buffer = info.buffer;
13875 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13876 buffer.stream = info.stream;
13877 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013878 }
13879
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013880 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13881 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13882 pendingBuffer->frame_number > pendingRequest->frame_number) {
13883 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013884 camera3_notify_msg_t notify_msg;
13885 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13886 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013887 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13888 notify_msg.message.error.error_stream = nullptr;
13889 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013890 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013891
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013892 if (pendingRequest->input_buffer != nullptr) {
13893 camera3_capture_result result = {};
13894 result.frame_number = pendingRequest->frame_number;
13895 result.result = nullptr;
13896 result.input_buffer = pendingRequest->input_buffer;
13897 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013898 }
13899
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013900 mShutterDispatcher.clear(pendingRequest->frame_number);
13901 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13902 } else {
13903 // If both buffers and result metadata weren't sent yet, notify about a request error
13904 // and return buffers with error.
13905 for (auto &info : pendingBuffer->mPendingBufferList) {
13906 camera3_notify_msg_t notify_msg;
13907 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13908 notify_msg.type = CAMERA3_MSG_ERROR;
13909 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13910 notify_msg.message.error.error_stream = info.stream;
13911 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
13912 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013913
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013914 camera3_stream_buffer_t buffer = {};
13915 buffer.acquire_fence = -1;
13916 buffer.release_fence = -1;
13917 buffer.buffer = info.buffer;
13918 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13919 buffer.stream = info.stream;
13920 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
13921 }
13922
13923 if (pendingRequest->input_buffer != nullptr) {
13924 camera3_capture_result result = {};
13925 result.frame_number = pendingRequest->frame_number;
13926 result.result = nullptr;
13927 result.input_buffer = pendingRequest->input_buffer;
13928 orchestrateResult(&result);
13929 }
13930
13931 mShutterDispatcher.clear(pendingRequest->frame_number);
13932 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13933 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070013934 }
13935 }
13936
13937 /* Reset pending frame Drop list and requests list */
13938 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013939 mShutterDispatcher.clear();
13940 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070013941 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013942 LOGH("Cleared all the pending buffers ");
13943
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013944 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013945}
13946
13947bool QCamera3HardwareInterface::isOnEncoder(
13948 const cam_dimension_t max_viewfinder_size,
13949 uint32_t width, uint32_t height)
13950{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013951 return ((width > (uint32_t)max_viewfinder_size.width) ||
13952 (height > (uint32_t)max_viewfinder_size.height) ||
13953 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13954 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013955}
13956
13957/*===========================================================================
13958 * FUNCTION : setBundleInfo
13959 *
13960 * DESCRIPTION: Set bundle info for all streams that are bundle.
13961 *
13962 * PARAMETERS : None
13963 *
13964 * RETURN : NO_ERROR on success
13965 * Error codes on failure
13966 *==========================================================================*/
13967int32_t QCamera3HardwareInterface::setBundleInfo()
13968{
13969 int32_t rc = NO_ERROR;
13970
13971 if (mChannelHandle) {
13972 cam_bundle_config_t bundleInfo;
13973 memset(&bundleInfo, 0, sizeof(bundleInfo));
13974 rc = mCameraHandle->ops->get_bundle_info(
13975 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13976 if (rc != NO_ERROR) {
13977 LOGE("get_bundle_info failed");
13978 return rc;
13979 }
13980 if (mAnalysisChannel) {
13981 mAnalysisChannel->setBundleInfo(bundleInfo);
13982 }
13983 if (mSupportChannel) {
13984 mSupportChannel->setBundleInfo(bundleInfo);
13985 }
13986 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13987 it != mStreamInfo.end(); it++) {
13988 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13989 channel->setBundleInfo(bundleInfo);
13990 }
13991 if (mRawDumpChannel) {
13992 mRawDumpChannel->setBundleInfo(bundleInfo);
13993 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013994 if (mHdrPlusRawSrcChannel) {
13995 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13996 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013997 }
13998
13999 return rc;
14000}
14001
14002/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014003 * FUNCTION : setInstantAEC
14004 *
14005 * DESCRIPTION: Set Instant AEC related params.
14006 *
14007 * PARAMETERS :
14008 * @meta: CameraMetadata reference
14009 *
14010 * RETURN : NO_ERROR on success
14011 * Error codes on failure
14012 *==========================================================================*/
14013int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14014{
14015 int32_t rc = NO_ERROR;
14016 uint8_t val = 0;
14017 char prop[PROPERTY_VALUE_MAX];
14018
14019 // First try to configure instant AEC from framework metadata
14020 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14021 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14022 }
14023
14024 // If framework did not set this value, try to read from set prop.
14025 if (val == 0) {
14026 memset(prop, 0, sizeof(prop));
14027 property_get("persist.camera.instant.aec", prop, "0");
14028 val = (uint8_t)atoi(prop);
14029 }
14030
14031 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14032 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14033 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14034 mInstantAEC = val;
14035 mInstantAECSettledFrameNumber = 0;
14036 mInstantAecFrameIdxCount = 0;
14037 LOGH("instantAEC value set %d",val);
14038 if (mInstantAEC) {
14039 memset(prop, 0, sizeof(prop));
14040 property_get("persist.camera.ae.instant.bound", prop, "10");
14041 int32_t aec_frame_skip_cnt = atoi(prop);
14042 if (aec_frame_skip_cnt >= 0) {
14043 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14044 } else {
14045 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14046 rc = BAD_VALUE;
14047 }
14048 }
14049 } else {
14050 LOGE("Bad instant aec value set %d", val);
14051 rc = BAD_VALUE;
14052 }
14053 return rc;
14054}
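
/* Usage sketch (assumes adb access on a debuggable build): instant AEC can be
 * forced even when the framework does not request it, e.g.
 *   adb shell setprop persist.camera.instant.aec 1
 * The value must lie in [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX),
 * and persist.camera.ae.instant.bound (default 10) bounds the number of display
 * frames skipped while AEC settles (mAecSkipDisplayFrameBound).
 */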
14055
14056/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014057 * FUNCTION : get_num_overall_buffers
14058 *
14059 * DESCRIPTION: Estimate number of pending buffers across all requests.
14060 *
14061 * PARAMETERS : None
14062 *
14063 * RETURN : Number of overall pending buffers
14064 *
14065 *==========================================================================*/
14066uint32_t PendingBuffersMap::get_num_overall_buffers()
14067{
14068 uint32_t sum_buffers = 0;
14069 for (auto &req : mPendingBuffersInRequest) {
14070 sum_buffers += req.mPendingBufferList.size();
14071 }
14072 return sum_buffers;
14073}
14074
14075/*===========================================================================
14076 * FUNCTION : removeBuf
14077 *
14078 * DESCRIPTION: Remove a matching buffer from tracker.
14079 *
14080 * PARAMETERS : @buffer: image buffer for the callback
14081 *
14082 * RETURN : None
14083 *
14084 *==========================================================================*/
14085void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14086{
14087 bool buffer_found = false;
14088 for (auto req = mPendingBuffersInRequest.begin();
14089 req != mPendingBuffersInRequest.end(); req++) {
14090 for (auto k = req->mPendingBufferList.begin();
14091 k != req->mPendingBufferList.end(); k++ ) {
14092 if (k->buffer == buffer) {
14093 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14094 req->frame_number, buffer);
14095 k = req->mPendingBufferList.erase(k);
14096 if (req->mPendingBufferList.empty()) {
14097 // Remove this request from Map
14098 req = mPendingBuffersInRequest.erase(req);
14099 }
14100 buffer_found = true;
14101 break;
14102 }
14103 }
14104 if (buffer_found) {
14105 break;
14106 }
14107 }
14108 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14109 get_num_overall_buffers());
14110}
14111
14112/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014113 * FUNCTION : getBufErrStatus
14114 *
14115 * DESCRIPTION: get buffer error status
14116 *
14117 * PARAMETERS : @buffer: buffer handle
14118 *
14119 * RETURN : Error status
14120 *
14121 *==========================================================================*/
14122int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14123{
14124 for (auto& req : mPendingBuffersInRequest) {
14125 for (auto& k : req.mPendingBufferList) {
14126 if (k.buffer == buffer)
14127 return k.bufStatus;
14128 }
14129 }
14130 return CAMERA3_BUFFER_STATUS_OK;
14131}
14132
14133/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014134 * FUNCTION : setPAAFSupport
14135 *
14136 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14137 * feature mask according to stream type and filter
14138 * arrangement
14139 *
14140 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14141 * @stream_type: stream type
14142 * @filter_arrangement: filter arrangement
14143 *
14144 * RETURN : None
14145 *==========================================================================*/
14146void QCamera3HardwareInterface::setPAAFSupport(
14147 cam_feature_mask_t& feature_mask,
14148 cam_stream_type_t stream_type,
14149 cam_color_filter_arrangement_t filter_arrangement)
14150{
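    // For Bayer sensors, PAAF is enabled on preview, analysis and video streams
    // unless the QTI EIS post-processing path (CAM_QTI_FEATURE_PPEISCORE) already
    // owns the stream; for mono (Y-only) sensors it is limited to the analysis
    // stream.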
Thierry Strudel3d639192016-09-09 11:52:26 -070014151 switch (filter_arrangement) {
14152 case CAM_FILTER_ARRANGEMENT_RGGB:
14153 case CAM_FILTER_ARRANGEMENT_GRBG:
14154 case CAM_FILTER_ARRANGEMENT_GBRG:
14155 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014156 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14157 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014158 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014159 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14160 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014161 }
14162 break;
14163 case CAM_FILTER_ARRANGEMENT_Y:
14164 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14165 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14166 }
14167 break;
14168 default:
14169 break;
14170 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014171 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14172 feature_mask, stream_type, filter_arrangement);
14173
14174
Thierry Strudel3d639192016-09-09 11:52:26 -070014175}
14176
14177/*===========================================================================
14178* FUNCTION : getSensorMountAngle
14179*
14180* DESCRIPTION: Retrieve sensor mount angle
14181*
14182* PARAMETERS : None
14183*
14184* RETURN : sensor mount angle in uint32_t
14185*==========================================================================*/
14186uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14187{
14188 return gCamCapability[mCameraId]->sensor_mount_angle;
14189}
14190
14191/*===========================================================================
14192* FUNCTION : getRelatedCalibrationData
14193*
14194* DESCRIPTION: Retrieve related system calibration data
14195*
14196* PARAMETERS : None
14197*
14198* RETURN : Pointer of related system calibration data
14199*==========================================================================*/
14200const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14201{
14202 return (const cam_related_system_calibration_data_t *)
14203 &(gCamCapability[mCameraId]->related_cam_calibration);
14204}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014205
14206/*===========================================================================
14207 * FUNCTION : is60HzZone
14208 *
14209 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14210 *
14211 * PARAMETERS : None
14212 *
14213 * RETURN : True if in 60Hz zone, False otherwise
14214 *==========================================================================*/
14215bool QCamera3HardwareInterface::is60HzZone()
14216{
14217 time_t t = time(NULL);
14218 struct tm lt;
14219
14220 struct tm* r = localtime_r(&t, &lt);
14221
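    // Heuristic based purely on the UTC offset: zones at or west of UTC-02:00
    // (the Americas) and at or east of UTC+08:00 are assumed to use 60Hz mains
    // power, everything in between is treated as 50Hz, and 60Hz is assumed when
    // local time cannot be determined.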
14222 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14223 return true;
14224 else
14225 return false;
14226}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014227
14228/*===========================================================================
14229 * FUNCTION : adjustBlackLevelForCFA
14230 *
14231 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14232 *              of the Bayer CFA (Color Filter Array).
14233 *
14234 * PARAMETERS : @input: black level pattern in the order of RGGB
14235 * @output: black level pattern in the order of CFA
14236 * @color_arrangement: CFA color arrangement
14237 *
14238 * RETURN : None
14239 *==========================================================================*/
14240template<typename T>
14241void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14242 T input[BLACK_LEVEL_PATTERN_CNT],
14243 T output[BLACK_LEVEL_PATTERN_CNT],
14244 cam_color_filter_arrangement_t color_arrangement)
14245{
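    // Worked example (hypothetical values): an RGGB-ordered input
    // {R, Gr, Gb, B} = {64, 65, 66, 62} is reordered to {Gr, R, B, Gb} =
    // {65, 64, 62, 66} for a GRBG sensor, matching the CFA readout order.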
14246 switch (color_arrangement) {
14247 case CAM_FILTER_ARRANGEMENT_GRBG:
14248 output[0] = input[1];
14249 output[1] = input[0];
14250 output[2] = input[3];
14251 output[3] = input[2];
14252 break;
14253 case CAM_FILTER_ARRANGEMENT_GBRG:
14254 output[0] = input[2];
14255 output[1] = input[3];
14256 output[2] = input[0];
14257 output[3] = input[1];
14258 break;
14259 case CAM_FILTER_ARRANGEMENT_BGGR:
14260 output[0] = input[3];
14261 output[1] = input[2];
14262 output[2] = input[1];
14263 output[3] = input[0];
14264 break;
14265 case CAM_FILTER_ARRANGEMENT_RGGB:
14266 output[0] = input[0];
14267 output[1] = input[1];
14268 output[2] = input[2];
14269 output[3] = input[3];
14270 break;
14271 default:
14272 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14273 break;
14274 }
14275}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014276
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014277void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14278 CameraMetadata &resultMetadata,
14279 std::shared_ptr<metadata_buffer_t> settings)
14280{
14281 if (settings == nullptr) {
14282 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14283 return;
14284 }
14285
14286 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14287 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14288 }
14289
14290 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14291 String8 str((const char *)gps_methods);
14292 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14293 }
14294
14295 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14296 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14297 }
14298
14299 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14300 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14301 }
14302
14303 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14304 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14305 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14306 }
14307
14308 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14309 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14310 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14311 }
14312
14313 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14314 int32_t fwk_thumb_size[2];
14315 fwk_thumb_size[0] = thumb_size->width;
14316 fwk_thumb_size[1] = thumb_size->height;
14317 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14318 }
14319
14320 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14321 uint8_t fwk_intent = intent[0];
14322 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14323 }
14324}
14325
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014326bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14327 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14328 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014329{
14330 if (hdrPlusRequest == nullptr) return false;
14331
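    // A request qualifies for HDR+ only if both noise reduction and edge modes
    // are HIGH_QUALITY and it carries exactly one JPEG (BLOB) output buffer;
    // anything else falls back to the regular capture path (return false).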
14332 // Check noise reduction mode is high quality.
14333 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14334 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14335 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014336 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14337 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014338 return false;
14339 }
14340
14341 // Check edge mode is high quality.
14342 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14343 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14344 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14345 return false;
14346 }
14347
14348 if (request.num_output_buffers != 1 ||
14349 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14350 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014351 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14352 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14353                    request.output_buffers[i].stream->width,
14354                    request.output_buffers[i].stream->height,
14355                    request.output_buffers[i].stream->format);
14356 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014357 return false;
14358 }
14359
14360 // Get a YUV buffer from pic channel.
14361 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14362 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14363 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14364 if (res != OK) {
14365 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14366 __FUNCTION__, strerror(-res), res);
14367 return false;
14368 }
14369
14370 pbcamera::StreamBuffer buffer;
14371 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014372 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014373 buffer.data = yuvBuffer->buffer;
14374 buffer.dataSize = yuvBuffer->frame_len;
14375
14376 pbcamera::CaptureRequest pbRequest;
14377 pbRequest.id = request.frame_number;
14378 pbRequest.outputBuffers.push_back(buffer);
14379
14380 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014381 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014382 if (res != OK) {
14383 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14384 strerror(-res), res);
14385 return false;
14386 }
14387
14388 hdrPlusRequest->yuvBuffer = yuvBuffer;
14389 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14390
14391 return true;
14392}
14393
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014394status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14395{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014396 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14397 return OK;
14398 }
14399
14400 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14401 if (res != OK) {
14402 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14403 strerror(-res), res);
14404 return res;
14405 }
14406 gHdrPlusClientOpening = true;
14407
14408 return OK;
14409}
14410
Chien-Yu Chenee335912017-02-09 17:53:20 -080014411status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14412{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014413 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014414
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014415 if (mHdrPlusModeEnabled) {
14416 return OK;
14417 }
14418
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014419 // Check if gHdrPlusClient is opened or being opened.
14420 if (gHdrPlusClient == nullptr) {
14421 if (gHdrPlusClientOpening) {
14422 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14423 return OK;
14424 }
14425
14426 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014427 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014428 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14429 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014430 return res;
14431 }
14432
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014433 // When opening HDR+ client completes, HDR+ mode will be enabled.
14434 return OK;
14435
Chien-Yu Chenee335912017-02-09 17:53:20 -080014436 }
14437
14438 // Configure stream for HDR+.
14439 res = configureHdrPlusStreamsLocked();
14440 if (res != OK) {
14441 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014442 return res;
14443 }
14444
14445 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14446 res = gHdrPlusClient->setZslHdrPlusMode(true);
14447 if (res != OK) {
14448 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014449 return res;
14450 }
14451
14452 mHdrPlusModeEnabled = true;
14453 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14454
14455 return OK;
14456}
14457
14458void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14459{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014460 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014461 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014462 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14463 if (res != OK) {
14464 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14465 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014466
14467 // Close HDR+ client so Easel can enter low power mode.
14468 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14469 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014470 }
14471
14472 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014473 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014474 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14475}
14476
14477status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014478{
14479 pbcamera::InputConfiguration inputConfig;
14480 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14481 status_t res = OK;
14482
14483 // Configure HDR+ client streams.
14484 // Get input config.
14485 if (mHdrPlusRawSrcChannel) {
14486 // HDR+ input buffers will be provided by HAL.
14487 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14488 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14489 if (res != OK) {
14490 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14491 __FUNCTION__, strerror(-res), res);
14492 return res;
14493 }
14494
14495 inputConfig.isSensorInput = false;
14496 } else {
14497 // Sensor MIPI will send data to Easel.
14498 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014499 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014500 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14501 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14502 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14503 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14504 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014505 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014506 if (mSensorModeInfo.num_raw_bits != 10) {
14507 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14508 mSensorModeInfo.num_raw_bits);
14509 return BAD_VALUE;
14510 }
14511
14512 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014513 }
14514
14515 // Get output configurations.
14516 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014517 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014518
14519 // Easel may need to output YUV output buffers if mPictureChannel was created.
14520 pbcamera::StreamConfiguration yuvOutputConfig;
14521 if (mPictureChannel != nullptr) {
14522 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14523 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14524 if (res != OK) {
14525 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14526 __FUNCTION__, strerror(-res), res);
14527
14528 return res;
14529 }
14530
14531 outputStreamConfigs.push_back(yuvOutputConfig);
14532 }
14533
14534 // TODO: consider other channels for YUV output buffers.
14535
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014536 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014537 if (res != OK) {
14538        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14539 strerror(-res), res);
14540 return res;
14541 }
14542
14543 return OK;
14544}
14545
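// Called once an asynchronously requested HDR+ client has finished opening. Takes ownership of
// the client, pushes the camera's static metadata to it, and enables HDR+ mode. If HDR+ was
// disabled while the client was being opened, the newly opened client is discarded.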
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014546void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14547{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014548 if (client == nullptr) {
14549 ALOGE("%s: Opened client is null.", __FUNCTION__);
14550 return;
14551 }
14552
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014553 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014554 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14555
14556 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014557 if (!gHdrPlusClientOpening) {
14558        ALOGW("%s: HDR+ mode was disabled while the HDR+ client was being opened.", __FUNCTION__);
14559 return;
14560 }
14561
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014562 gHdrPlusClient = std::move(client);
14563 gHdrPlusClientOpening = false;
14564
14565 // Set static metadata.
14566 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14567 if (res != OK) {
14568 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14569 __FUNCTION__, strerror(-res), res);
14570 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14571 gHdrPlusClient = nullptr;
14572 return;
14573 }
14574
14575 // Enable HDR+ mode.
14576 res = enableHdrPlusModeLocked();
14577 if (res != OK) {
14578        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14579 }
14580}
14581
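// Called when opening the HDR+ client fails; clears gHdrPlusClientOpening so the HAL no longer
// waits for the client.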
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014582void QCamera3HardwareInterface::onOpenFailed(status_t err)
14583{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014584 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14585 Mutex::Autolock l(gHdrPlusClientLock);
14586 gHdrPlusClientOpening = false;
14587}
14588
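// Called when the HDR+ client reports a fatal error; moves the HAL into the ERROR state and
// notifies the framework of the device error.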
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014589void QCamera3HardwareInterface::onFatalError()
14590{
14591 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14592
14593 // Set HAL state to error.
14594 pthread_mutex_lock(&mMutex);
14595 mState = ERROR;
14596 pthread_mutex_unlock(&mMutex);
14597
14598 handleCameraDeviceError();
14599}
14600
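// Handles a successful HDR+ capture result. The single YUV output buffer is returned to the pic
// channel for JPEG encoding, the result metadata (which belongs to the ZSL input buffer) is
// updated with the original still-capture request settings, the shutter is dispatched using the
// sensor timestamp, and the updated metadata is sent to the framework.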
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014601void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014602 const camera_metadata_t &resultMetadata)
14603{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014604 if (result != nullptr) {
14605 if (result->outputBuffers.size() != 1) {
14606            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14607 result->outputBuffers.size());
14608 return;
14609 }
14610
14611 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14612            ALOGE("%s: Only YUV output stream is supported (stream id %d).", __FUNCTION__,
14613 result->outputBuffers[0].streamId);
14614 return;
14615 }
14616
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014617 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014618 HdrPlusPendingRequest pendingRequest;
14619 {
14620 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14621 auto req = mHdrPlusPendingRequests.find(result->requestId);
14622 pendingRequest = req->second;
14623 }
14624
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014625 // Update the result metadata with the settings of the HDR+ still capture request because
14626 // the result metadata belongs to a ZSL buffer.
14627 CameraMetadata metadata;
14628 metadata = &resultMetadata;
14629 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14630 camera_metadata_t* updatedResultMetadata = metadata.release();
14631
14632 QCamera3PicChannel *picChannel =
14633 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14634
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014635 // Check if dumping HDR+ YUV output is enabled.
14636 char prop[PROPERTY_VALUE_MAX];
14637 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14638 bool dumpYuvOutput = atoi(prop);
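        // The dump is a debugging aid; it can typically be toggled at runtime with, e.g.,
        //   adb shell setprop persist.camera.hdrplus.dump_yuv 1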
14639
14640 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014641 // Dump yuv buffer to a ppm file.
14642 pbcamera::StreamConfiguration outputConfig;
14643 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14644 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14645 if (rc == OK) {
14646 char buf[FILENAME_MAX] = {};
14647 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14648 result->requestId, result->outputBuffers[0].streamId,
14649 outputConfig.image.width, outputConfig.image.height);
14650
14651 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14652 } else {
14653 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14654 __FUNCTION__, strerror(-rc), rc);
14655 }
14656 }
14657
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014658 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14659 auto halMetadata = std::make_shared<metadata_buffer_t>();
14660 clear_metadata_buffer(halMetadata.get());
14661
14662 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14663 // encoding.
14664 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14665 halStreamId, /*minFrameDuration*/0);
14666 if (res == OK) {
14667 // Return the buffer to pic channel for encoding.
14668 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14669 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14670 halMetadata);
14671 } else {
14672 // Return the buffer without encoding.
14673 // TODO: This should not happen but we may want to report an error buffer to camera
14674 // service.
14675 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14676 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14677 strerror(-res), res);
14678 }
14679
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014680        // Find the sensor timestamp in the result metadata and mark the shutter ready.
14681 camera_metadata_ro_entry_t entry;
14682 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14683 ANDROID_SENSOR_TIMESTAMP, &entry);
14684 if (res != OK) {
14685 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14686 __FUNCTION__, result->requestId, strerror(-res), res);
14687 } else {
14688 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14689 }
14690
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014691        // Send the HDR+ result metadata to the framework.
14692 {
14693 pthread_mutex_lock(&mMutex);
14694
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014695 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14696 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014697 pthread_mutex_unlock(&mMutex);
14698 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014699
14700 // Remove the HDR+ pending request.
14701 {
14702 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14703 auto req = mHdrPlusPendingRequests.find(result->requestId);
14704 mHdrPlusPendingRequests.erase(req);
14705 }
14706 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014707}
14708
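// Handles a failed HDR+ capture result: returns the YUV buffer to the pic channel, sends a
// CAMERA3_MSG_ERROR_BUFFER notification and an error result for every pending buffer of the
// failed frame, and removes the corresponding pending request.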
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014709void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14710{
14711 if (failedResult == nullptr) {
14712 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14713 return;
14714 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014715
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014716 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014717
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014718 // Remove the pending HDR+ request.
14719 {
14720 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14721 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14722
14723 // Return the buffer to pic channel.
14724 QCamera3PicChannel *picChannel =
14725 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14726 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14727
14728 mHdrPlusPendingRequests.erase(pendingRequest);
14729 }
14730
14731 pthread_mutex_lock(&mMutex);
14732
14733 // Find the pending buffers.
14734 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14735 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14736 if (pendingBuffers->frame_number == failedResult->requestId) {
14737 break;
14738 }
14739 pendingBuffers++;
14740 }
14741
14742 // Send out buffer errors for the pending buffers.
14743 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14744 std::vector<camera3_stream_buffer_t> streamBuffers;
14745 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14746 // Prepare a stream buffer.
14747 camera3_stream_buffer_t streamBuffer = {};
14748 streamBuffer.stream = buffer.stream;
14749 streamBuffer.buffer = buffer.buffer;
14750 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14751 streamBuffer.acquire_fence = -1;
14752 streamBuffer.release_fence = -1;
14753
14754 streamBuffers.push_back(streamBuffer);
14755
14756 // Send out error buffer event.
14757 camera3_notify_msg_t notify_msg = {};
14758 notify_msg.type = CAMERA3_MSG_ERROR;
14759 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14760 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14761 notify_msg.message.error.error_stream = buffer.stream;
14762
14763 orchestrateNotify(&notify_msg);
14764 }
14765
14766 camera3_capture_result_t result = {};
14767 result.frame_number = pendingBuffers->frame_number;
14768 result.num_output_buffers = streamBuffers.size();
14769 result.output_buffers = &streamBuffers[0];
14770
14771 // Send out result with buffer errors.
14772 orchestrateResult(&result);
14773
14774 // Remove pending buffers.
14775 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14776 }
14777
14778 // Remove pending request.
14779 auto halRequest = mPendingRequestsList.begin();
14780 while (halRequest != mPendingRequestsList.end()) {
14781 if (halRequest->frame_number == failedResult->requestId) {
14782 mPendingRequestsList.erase(halRequest);
14783 break;
14784 }
14785 halRequest++;
14786 }
14787
14788 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014789}
14790
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014791
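// ShutterDispatcher delivers shutter notifications to the framework in frame-number order.
// A minimal usage sketch (the frame number and timestamp values are illustrative):
//   mShutterDispatcher.expectShutter(frameNumber);                  // when the request is queued
//   mShutterDispatcher.markShutterReady(frameNumber, timestampNs);  // once the timestamp is known
// markShutterReady() flushes every consecutively ready shutter, starting from the oldest.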
14792ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14793 mParent(parent) {}
14794
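// Registers a frame number whose shutter will later be dispatched, in order, by markShutterReady().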
14795void ShutterDispatcher::expectShutter(uint32_t frameNumber)
14796{
14797 std::lock_guard<std::mutex> lock(mLock);
14798 mShutters.emplace(frameNumber, Shutter());
14799}
14800
14801void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14802{
14803 std::lock_guard<std::mutex> lock(mLock);
14804
14805 // Make this frame's shutter ready.
14806 auto shutter = mShutters.find(frameNumber);
14807 if (shutter == mShutters.end()) {
14808 // Shutter was already sent.
14809 return;
14810 }
14811
14812 shutter->second.ready = true;
14813 shutter->second.timestamp = timestamp;
14814
14815    // Iterate through the shutters and send out shutters until we reach one that is not ready yet.
14816 shutter = mShutters.begin();
14817 while (shutter != mShutters.end()) {
14818 if (!shutter->second.ready) {
14819 // If this shutter is not ready, the following shutters can't be sent.
14820 break;
14821 }
14822
14823 camera3_notify_msg_t msg = {};
14824 msg.type = CAMERA3_MSG_SHUTTER;
14825 msg.message.shutter.frame_number = shutter->first;
14826 msg.message.shutter.timestamp = shutter->second.timestamp;
14827 mParent->orchestrateNotify(&msg);
14828
14829 shutter = mShutters.erase(shutter);
14830 }
14831}
14832
14833void ShutterDispatcher::clear(uint32_t frameNumber)
14834{
14835 std::lock_guard<std::mutex> lock(mLock);
14836 mShutters.erase(frameNumber);
14837}
14838
14839void ShutterDispatcher::clear()
14840{
14841 std::lock_guard<std::mutex> lock(mLock);
14842
14843 // Log errors for stale shutters.
14844 for (auto &shutter : mShutters) {
14845        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRIu64,
14846 __FUNCTION__, shutter.first, shutter.second.ready,
14847 shutter.second.timestamp);
14848 }
14849 mShutters.clear();
14850}
14851
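// OutputBufferDispatcher delivers output buffers to the framework in frame-number order,
// tracked independently per stream. A minimal usage sketch (variable names are illustrative):
//   dispatcher.configureStreams(streamList);                 // at stream configuration time
//   dispatcher.expectBuffer(frameNumber, stream);            // when a request references the stream
//   dispatcher.markBufferReady(frameNumber, streamBuffer);   // when the buffer has been filled
// markBufferReady() flushes every consecutively ready buffer for that stream, oldest first.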
14852OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
14853 mParent(parent) {}
14854
14855status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
14856{
14857 std::lock_guard<std::mutex> lock(mLock);
14858 mStreamBuffers.clear();
14859 if (!streamList) {
14860 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
14861 return -EINVAL;
14862 }
14863
14864 // Create a "frame-number -> buffer" map for each stream.
14865 for (uint32_t i = 0; i < streamList->num_streams; i++) {
14866 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
14867 }
14868
14869 return OK;
14870}
14871
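// Registers a pending buffer slot for the given frame number; fails if the stream was never
// passed to configureStreams().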
14872status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
14873{
14874 std::lock_guard<std::mutex> lock(mLock);
14875
14876 // Find the "frame-number -> buffer" map for the stream.
14877 auto buffers = mStreamBuffers.find(stream);
14878 if (buffers == mStreamBuffers.end()) {
14879 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
14880 return -EINVAL;
14881 }
14882
14883 // Create an unready buffer for this frame number.
14884 buffers->second.emplace(frameNumber, Buffer());
14885 return OK;
14886}
14887
14888void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
14889 const camera3_stream_buffer_t &buffer)
14890{
14891 std::lock_guard<std::mutex> lock(mLock);
14892
14893 // Find the frame number -> buffer map for the stream.
14894 auto buffers = mStreamBuffers.find(buffer.stream);
14895 if (buffers == mStreamBuffers.end()) {
14896 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
14897 return;
14898 }
14899
14900    // Find the unready buffer for this frame number and mark it ready.
14901 auto pendingBuffer = buffers->second.find(frameNumber);
14902 if (pendingBuffer == buffers->second.end()) {
14903 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
14904 return;
14905 }
14906
14907 pendingBuffer->second.ready = true;
14908 pendingBuffer->second.buffer = buffer;
14909
14910    // Iterate through the buffers and send out buffers until we reach one that is not ready yet.
14911 pendingBuffer = buffers->second.begin();
14912 while (pendingBuffer != buffers->second.end()) {
14913 if (!pendingBuffer->second.ready) {
14914 // If this buffer is not ready, the following buffers can't be sent.
14915 break;
14916 }
14917
14918 camera3_capture_result_t result = {};
14919 result.frame_number = pendingBuffer->first;
14920 result.num_output_buffers = 1;
14921 result.output_buffers = &pendingBuffer->second.buffer;
14922
14923        // Send out the result with the ready buffer.
14924 mParent->orchestrateResult(&result);
14925
14926 pendingBuffer = buffers->second.erase(pendingBuffer);
14927 }
14928}
14929
14930void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
14931{
14932 std::lock_guard<std::mutex> lock(mLock);
14933
14934 // Log errors for stale buffers.
14935 for (auto &buffers : mStreamBuffers) {
14936 for (auto &buffer : buffers.second) {
14937 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
14938 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
14939 }
14940 buffers.second.clear();
14941 }
14942
14943 if (clearConfiguredStreams) {
14944 mStreamBuffers.clear();
14945 }
14946}
14947
Thierry Strudel3d639192016-09-09 11:52:26 -070014948}; //end namespace qcamera