/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if the requested FPS is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Threshold (in seconds) for detecting missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
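// For example, METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to the number of entries
// in that lookup table at compile time.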

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per-configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all of the options, some Android enums are not listed.
 * The order in this list also matters: when mapping from HAL to Android the code
 * traverses from lower to higher index, so for HAL values that map to multiple
 * Android values, the first match found is selected.
 */
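// For example, CAM_AWB_D50 appears below against D50, DAYLIGHT and FINE_WEATHER;
// a HAL-to-Android lookup of CAM_AWB_D50 therefore returns ILLUMINANT1_D50, the first match.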
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

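// These static callbacks are shared by all camera instances; the framework invokes them through
// camera3_device_t::ops, and each call is routed back to the owning QCamera3HardwareInterface
// via the device's priv field (set in the constructor below).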
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// Initialize session IDs to an invalid default value.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

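// Logs an Easel timing event with a CLOCK_BOOTTIME timestamp in milliseconds;
// this is a no-op unless Easel profiling (gEaselProfilingEnabled) is turned on.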
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcoded for now until mctl adds support for min_num_pp_bufs
    // TBD: verify whether this hardcoding is needed (check whether mctl fills this to 3)
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero  -- failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

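    // If an Easel coprocessor is present, resume it before opening the camera; it is suspended
    // again below if opening the camera fails.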
    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero  -- failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero  -- failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check that the requested stream configurations are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                        (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                        mPDSupported) {
                    if ((depthWidth == newStream->width) &&
                            (depthHeight == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                        mPDSupported) {
                    // As per spec, the depth point cloud blob width should be sample count / 16
                    uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
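                    // For instance, a hypothetical 480x270 PD map would give
                    // (480 * 270 * 2) / 16 = 16200 samples, i.e. a 16200x1 blob.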
                    if ((depthSamplesCount == newStream->width) &&
                            (1 == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
                    /* We could potentially break here to enforce that a ZSL stream
                     * set from the framework is always the full active array size,
                     * but it is not clear from the spec whether the framework will
                     * always follow that. We also have logic to override to the full
                     * array size, so keep the check lenient for now.
                     */
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                        MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configured usage flags map to the same internal format.
1328 *
1329 * PARAMETERS :
1330 * @stream_list : streams to be configured
1331 *
1332 * RETURN :
1333 * NO_ERROR if the usage flags are supported
1334 * error code if usage flags are not supported
1335 *
1336 *==========================================================================*/
1337int QCamera3HardwareInterface::validateUsageFlags(
1338 const camera3_stream_configuration_t* streamList)
1339{
1340 for (size_t j = 0; j < streamList->num_streams; j++) {
1341 const camera3_stream_t *newStream = streamList->streams[j];
1342
1343 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1344 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1345 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1346 continue;
1347 }
1348
Jason Leec4cf5032017-05-24 18:31:41 -07001349 // Here we only care whether it's EIS3 or not
1350 char is_type_value[PROPERTY_VALUE_MAX];
1351 property_get("persist.camera.is_type", is_type_value, "4");
1352 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1353 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1354 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1355 isType = IS_TYPE_NONE;
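        // The IS type is passed to getStreamDefaultFormat() below because the
        // default internal format chosen for a stream may differ when EIS 3.0
        // is in use.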
1356
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001357 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1358 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1359 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1360 bool forcePreviewUBWC = true;
1361 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1362 forcePreviewUBWC = false;
1363 }
1364 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001365 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001366 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001367 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001369 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370
1371 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1372 // So color spaces will always match.
1373
1374 // Check whether underlying formats of shared streams match.
1375 if (isVideo && isPreview && videoFormat != previewFormat) {
1376 LOGE("Combined video and preview usage flag is not supported");
1377 return -EINVAL;
1378 }
1379 if (isPreview && isZSL && previewFormat != zslFormat) {
1380 LOGE("Combined preview and zsl usage flag is not supported");
1381 return -EINVAL;
1382 }
1383 if (isVideo && isZSL && videoFormat != zslFormat) {
1384 LOGE("Combined video and zsl usage flag is not supported");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
1391/*===========================================================================
1392 * FUNCTION : validateUsageFlagsForEis
1393 *
1394 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 * NO_ERROR if the usage flags are supported
1401 * error code if usage flags are not supported
1402 *
1403 *==========================================================================*/
1404int QCamera3HardwareInterface::validateUsageFlagsForEis(
1405 const camera3_stream_configuration_t* streamList)
1406{
1407 for (size_t j = 0; j < streamList->num_streams; j++) {
1408 const camera3_stream_t *newStream = streamList->streams[j];
1409
1410 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1411 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1412
1413 // Because EIS is "hard-coded" for certain use cases, and the current
1414 // implementation doesn't support shared preview and video on the same
1415 // stream, return failure if EIS is forced on.
1416 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1417 LOGE("Combined video and preview usage flag is not supported due to EIS");
1418 return -EINVAL;
1419 }
1420 }
1421 return NO_ERROR;
1422}
1423
Thierry Strudel3d639192016-09-09 11:52:26 -07001424/*==============================================================================
1425 * FUNCTION : isSupportChannelNeeded
1426 *
1427 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1428 *
1429 * PARAMETERS :
1430 * @stream_list : streams to be configured
1431 * @stream_config_info : the config info for streams to be configured
1432 *
1433 * RETURN : Boolean true/false decision
1434 *
1435 *==========================================================================*/
1436bool QCamera3HardwareInterface::isSupportChannelNeeded(
1437 camera3_stream_configuration_t *streamList,
1438 cam_stream_size_info_t stream_config_info)
1439{
1440 uint32_t i;
1441 bool pprocRequested = false;
1442 /* Check for conditions where PProc pipeline does not have any streams */
1443 for (i = 0; i < stream_config_info.num_streams; i++) {
1444 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1445 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1446 pprocRequested = true;
1447 break;
1448 }
1449 }
1450
1451 if (pprocRequested == false)
1452 return true;
1453
1454 /* Dummy stream needed if only raw or jpeg streams present */
1455 for (i = 0; i < streamList->num_streams; i++) {
1456 switch(streamList->streams[i]->format) {
1457 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1458 case HAL_PIXEL_FORMAT_RAW10:
1459 case HAL_PIXEL_FORMAT_RAW16:
1460 case HAL_PIXEL_FORMAT_BLOB:
1461 break;
1462 default:
1463 return false;
1464 }
1465 }
1466 return true;
1467}
1468
1469/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 *
1474 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001475 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001476 *
1477 * RETURN : int32_t type of status
1478 * NO_ERROR -- success
1479 * non-zero failure code
1480 *
1481 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001483{
1484 int32_t rc = NO_ERROR;
1485
1486 cam_dimension_t max_dim = {0, 0};
1487 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1488 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1489 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1490 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1491 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1492 }
1493
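    // Push the largest configured stream dimensions to the backend first via
    // CAM_INTF_PARM_MAX_DIMENSION so that it can select a suitable sensor mode,
    // then query CAM_INTF_PARM_SENSOR_MODE_INFO to read back that mode.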
1494 clear_metadata_buffer(mParameters);
1495
1496 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1497 max_dim);
1498 if (rc != NO_ERROR) {
1499 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1500 return rc;
1501 }
1502
1503 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1504 if (rc != NO_ERROR) {
1505 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1506 return rc;
1507 }
1508
1509 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001510 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001511
1512 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1513 mParameters);
1514 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001515 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001516 return rc;
1517 }
1518
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001519 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001520 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1521 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1522 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1523 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1524 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001525
1526 return rc;
1527}
1528
1529/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001530 * FUNCTION : getCurrentSensorModeInfo
1531 *
1532 * DESCRIPTION: Get sensor mode information that is currently selected.
1533 *
1534 * PARAMETERS :
1535 * @sensorModeInfo : sensor mode information (output)
1536 *
1537 * RETURN : int32_t type of status
1538 * NO_ERROR -- success
1539 * non-zero failure code
1540 *
1541 *==========================================================================*/
1542int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1543{
1544 int32_t rc = NO_ERROR;
1545
1546 clear_metadata_buffer(mParameters);
1547 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1548
1549 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1550 mParameters);
1551 if (rc != NO_ERROR) {
1552 LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1553 return rc;
1554 }
1555
1556 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1557 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1558 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1559 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1560 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1561 sensorModeInfo.num_raw_bits);
1562
1563 return rc;
1564}
1565
1566/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001567 * FUNCTION : addToPPFeatureMask
1568 *
1569 * DESCRIPTION: add additional features to pp feature mask based on
1570 * stream type and use case
1571 *
1572 * PARAMETERS :
1573 * @stream_format : stream type for feature mask
1574 * @stream_idx : stream idx within postprocess_mask list to change
1575 *
1576 * RETURN : None
1577 *
1578 *==========================================================================*/
1579void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1580 uint32_t stream_idx)
1581{
1582 char feature_mask_value[PROPERTY_VALUE_MAX];
1583 cam_feature_mask_t feature_mask;
1584 int args_converted;
1585 int property_len;
1586
1587 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001588#ifdef _LE_CAMERA_
1589 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1590 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1591 property_len = property_get("persist.camera.hal3.feature",
1592 feature_mask_value, swtnr_feature_mask_value);
1593#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 property_len = property_get("persist.camera.hal3.feature",
1595 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001596#endif
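    // The feature mask property may be specified either as hex ("0x"-prefixed)
    // or as a decimal value.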
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1598 (feature_mask_value[1] == 'x')) {
1599 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1600 } else {
1601 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1602 }
1603 if (1 != args_converted) {
1604 feature_mask = 0;
1605 LOGE("Wrong feature mask %s", feature_mask_value);
1606 return;
1607 }
1608
1609 switch (stream_format) {
1610 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1611 /* Add SW TNR or LLVD SeeMore to pp feature mask only if video hint is enabled */
1612 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1613 mStreamConfigInfo.postprocess_mask[stream_idx]
1614 |= CAM_QTI_FEATURE_SW_TNR;
1615 LOGH("Added SW TNR to pp feature mask");
1616 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1617 mStreamConfigInfo.postprocess_mask[stream_idx]
1618 |= CAM_QCOM_FEATURE_LLVD;
1619 LOGH("Added LLVD SeeMore to pp feature mask");
1620 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001621 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1622 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1623 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1624 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001625 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1626 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1627 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1628 CAM_QTI_FEATURE_BINNING_CORRECTION;
1629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 break;
1631 }
1632 default:
1633 break;
1634 }
1635 LOGD("PP feature mask %llx",
1636 mStreamConfigInfo.postprocess_mask[stream_idx]);
1637}
1638
1639/*==============================================================================
1640 * FUNCTION : updateFpsInPreviewBuffer
1641 *
1642 * DESCRIPTION: update FPS information in preview buffer.
1643 *
1644 * PARAMETERS :
1645 * @metadata : pointer to metadata buffer
1646 * @frame_number: frame_number to look for in pending buffer list
1647 *
1648 * RETURN : None
1649 *
1650 *==========================================================================*/
1651void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1652 uint32_t frame_number)
1653{
1654 // Mark all pending buffers for this particular request
1655 // with corresponding framerate information
1656 for (List<PendingBuffersInRequest>::iterator req =
1657 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1658 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1659 for(List<PendingBufferInfo>::iterator j =
1660 req->mPendingBufferList.begin();
1661 j != req->mPendingBufferList.end(); j++) {
1662 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1663 if ((req->frame_number == frame_number) &&
1664 (channel->getStreamTypeMask() &
1665 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1666 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1667 CAM_INTF_PARM_FPS_RANGE, metadata) {
1668 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1669 struct private_handle_t *priv_handle =
1670 (struct private_handle_t *)(*(j->buffer));
1671 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1672 }
1673 }
1674 }
1675 }
1676}
1677
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001678/*==============================================================================
1679 * FUNCTION : updateTimeStampInPendingBuffers
1680 *
1681 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1682 * of a frame number
1683 *
1684 * PARAMETERS :
1685 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1686 * @timestamp : timestamp to be set
1687 *
1688 * RETURN : None
1689 *
1690 *==========================================================================*/
1691void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1692 uint32_t frameNumber, nsecs_t timestamp)
1693{
1694 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1695 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1696 if (req->frame_number != frameNumber)
1697 continue;
1698
1699 for (auto k = req->mPendingBufferList.begin();
1700 k != req->mPendingBufferList.end(); k++ ) {
1701 struct private_handle_t *priv_handle =
1702 (struct private_handle_t *) (*(k->buffer));
1703 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1704 }
1705 }
1706 return;
1707}
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709/*===========================================================================
1710 * FUNCTION : configureStreams
1711 *
1712 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1713 * and output streams.
1714 *
1715 * PARAMETERS :
1716 * @stream_list : streams to be configured
1717 *
1718 * RETURN : int type of status (NO_ERROR on success, non-zero failure code otherwise)
1719 *
1720 *==========================================================================*/
1721int QCamera3HardwareInterface::configureStreams(
1722 camera3_stream_configuration_t *streamList)
1723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001724 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001725 int rc = 0;
1726
1727 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001728 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001729 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001730 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001731
1732 return rc;
1733}
1734
1735/*===========================================================================
1736 * FUNCTION : configureStreamsPerfLocked
1737 *
1738 * DESCRIPTION: configureStreams while perfLock is held.
1739 *
1740 * PARAMETERS :
1741 * @stream_list : streams to be configured
1742 *
1743 * RETURN : int32_t type of status
1744 * NO_ERROR -- success
1745 * non-zero failure code
1746 *==========================================================================*/
1747int QCamera3HardwareInterface::configureStreamsPerfLocked(
1748 camera3_stream_configuration_t *streamList)
1749{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001750 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001751 int rc = 0;
1752
1753 // Sanity check stream_list
1754 if (streamList == NULL) {
1755 LOGE("NULL stream configuration");
1756 return BAD_VALUE;
1757 }
1758 if (streamList->streams == NULL) {
1759 LOGE("NULL stream list");
1760 return BAD_VALUE;
1761 }
1762
1763 if (streamList->num_streams < 1) {
1764 LOGE("Bad number of streams requested: %d",
1765 streamList->num_streams);
1766 return BAD_VALUE;
1767 }
1768
1769 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1770 LOGE("Maximum number of streams %d exceeded: %d",
1771 MAX_NUM_STREAMS, streamList->num_streams);
1772 return BAD_VALUE;
1773 }
1774
Jason Leec4cf5032017-05-24 18:31:41 -07001775 mOpMode = streamList->operation_mode;
1776 LOGD("mOpMode: %d", mOpMode);
1777
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 rc = validateUsageFlags(streamList);
1779 if (rc != NO_ERROR) {
1780 return rc;
1781 }
1782
Thierry Strudel3d639192016-09-09 11:52:26 -07001783 /* first invalidate all the steams in the mStreamList
1784 * if they appear again, they will be validated */
1785 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1786 it != mStreamInfo.end(); it++) {
1787 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1788 if (channel) {
1789 channel->stop();
1790 }
1791 (*it)->status = INVALID;
1792 }
1793
1794 if (mRawDumpChannel) {
1795 mRawDumpChannel->stop();
1796 delete mRawDumpChannel;
1797 mRawDumpChannel = NULL;
1798 }
1799
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001800 if (mHdrPlusRawSrcChannel) {
1801 mHdrPlusRawSrcChannel->stop();
1802 delete mHdrPlusRawSrcChannel;
1803 mHdrPlusRawSrcChannel = NULL;
1804 }
1805
Thierry Strudel3d639192016-09-09 11:52:26 -07001806 if (mSupportChannel)
1807 mSupportChannel->stop();
1808
1809 if (mAnalysisChannel) {
1810 mAnalysisChannel->stop();
1811 }
1812 if (mMetadataChannel) {
1813 /* If content of mStreamInfo is not 0, there is metadata stream */
1814 mMetadataChannel->stop();
1815 }
1816 if (mChannelHandle) {
1817 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1818 mChannelHandle);
1819 LOGD("stopping channel %d", mChannelHandle);
1820 }
1821
1822 pthread_mutex_lock(&mMutex);
1823
1824 // Check state
1825 switch (mState) {
1826 case INITIALIZED:
1827 case CONFIGURED:
1828 case STARTED:
1829 /* valid state */
1830 break;
1831 default:
1832 LOGE("Invalid state %d", mState);
1833 pthread_mutex_unlock(&mMutex);
1834 return -ENODEV;
1835 }
1836
1837 /* Check whether we have video stream */
1838 m_bIs4KVideo = false;
1839 m_bIsVideo = false;
1840 m_bEisSupportedSize = false;
1841 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001842 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001843 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001844 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001845 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 uint32_t videoWidth = 0U;
1847 uint32_t videoHeight = 0U;
1848 size_t rawStreamCnt = 0;
1849 size_t stallStreamCnt = 0;
1850 size_t processedStreamCnt = 0;
1851 // Number of streams on ISP encoder path
1852 size_t numStreamsOnEncoder = 0;
1853 size_t numYuv888OnEncoder = 0;
1854 bool bYuv888OverrideJpeg = false;
1855 cam_dimension_t largeYuv888Size = {0, 0};
1856 cam_dimension_t maxViewfinderSize = {0, 0};
1857 bool bJpegExceeds4K = false;
1858 bool bJpegOnEncoder = false;
1859 bool bUseCommonFeatureMask = false;
1860 cam_feature_mask_t commonFeatureMask = 0;
1861 bool bSmallJpegSize = false;
1862 uint32_t width_ratio;
1863 uint32_t height_ratio;
1864 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1865 camera3_stream_t *inputStream = NULL;
1866 bool isJpeg = false;
1867 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001868 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001869 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001870
1871 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1872
1873 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 uint8_t eis_prop_set;
1875 uint32_t maxEisWidth = 0;
1876 uint32_t maxEisHeight = 0;
1877
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001878 // Initialize all instant AEC related variables
1879 mInstantAEC = false;
1880 mResetInstantAEC = false;
1881 mInstantAECSettledFrameNumber = 0;
1882 mAecSkipDisplayFrameBound = 0;
1883 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001884 mCurrFeatureState = 0;
1885 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001886
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1888
1889 size_t count = IS_TYPE_MAX;
1890 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1891 for (size_t i = 0; i < count; i++) {
1892 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001893 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1894 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 break;
1896 }
1897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001898
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001899 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 maxEisWidth = MAX_EIS_WIDTH;
1901 maxEisHeight = MAX_EIS_HEIGHT;
1902 }
1903
1904 /* EIS setprop control */
1905 char eis_prop[PROPERTY_VALUE_MAX];
1906 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001907 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 eis_prop_set = (uint8_t)atoi(eis_prop);
1909
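    // EIS is enabled only when the hardware advertises support, the property
    // allows it, and this is not a constrained high-speed (HFR) session.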
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001910 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001911 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1912
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001913 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1914 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001915
Thierry Strudel3d639192016-09-09 11:52:26 -07001916 /* stream configurations */
1917 for (size_t i = 0; i < streamList->num_streams; i++) {
1918 camera3_stream_t *newStream = streamList->streams[i];
1919 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1920 "height = %d, rotation = %d, usage = 0x%x",
1921 i, newStream->stream_type, newStream->format,
1922 newStream->width, newStream->height, newStream->rotation,
1923 newStream->usage);
1924 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1925 newStream->stream_type == CAMERA3_STREAM_INPUT){
1926 isZsl = true;
1927 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001928 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1929 IS_USAGE_PREVIEW(newStream->usage)) {
1930 isPreview = true;
1931 }
1932
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1934 inputStream = newStream;
1935 }
1936
Emilian Peev7650c122017-01-19 08:24:33 -08001937 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1938 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001939 isJpeg = true;
1940 jpegSize.width = newStream->width;
1941 jpegSize.height = newStream->height;
1942 if (newStream->width > VIDEO_4K_WIDTH ||
1943 newStream->height > VIDEO_4K_HEIGHT)
1944 bJpegExceeds4K = true;
1945 }
1946
1947 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1948 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1949 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001950 // In HAL3 we can have multiple different video streams.
1951 // The variables video width and height are used below as
1952 // dimensions of the biggest of them
1953 if (videoWidth < newStream->width ||
1954 videoHeight < newStream->height) {
1955 videoWidth = newStream->width;
1956 videoHeight = newStream->height;
1957 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001958 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1959 (VIDEO_4K_HEIGHT <= newStream->height)) {
1960 m_bIs4KVideo = true;
1961 }
1962 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1963 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001964
Thierry Strudel3d639192016-09-09 11:52:26 -07001965 }
1966 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1967 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1968 switch (newStream->format) {
1969 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001970 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1971 depthPresent = true;
1972 break;
1973 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001974 stallStreamCnt++;
1975 if (isOnEncoder(maxViewfinderSize, newStream->width,
1976 newStream->height)) {
1977 numStreamsOnEncoder++;
1978 bJpegOnEncoder = true;
1979 }
1980 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1981 newStream->width);
1982 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1983 newStream->height);
1984 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1985 "FATAL: max_downscale_factor cannot be zero and so assert");
1986 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1987 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1988 LOGH("Setting small jpeg size flag to true");
1989 bSmallJpegSize = true;
1990 }
1991 break;
1992 case HAL_PIXEL_FORMAT_RAW10:
1993 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1994 case HAL_PIXEL_FORMAT_RAW16:
1995 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001996 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1997 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1998 pdStatCount++;
1999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002000 break;
2001 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2002 processedStreamCnt++;
2003 if (isOnEncoder(maxViewfinderSize, newStream->width,
2004 newStream->height)) {
2005 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2006 !IS_USAGE_ZSL(newStream->usage)) {
2007 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2008 }
2009 numStreamsOnEncoder++;
2010 }
2011 break;
2012 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2013 processedStreamCnt++;
2014 if (isOnEncoder(maxViewfinderSize, newStream->width,
2015 newStream->height)) {
2016 // If Yuv888 size is not greater than 4K, set feature mask
2017 // to SUPERSET so that it supports concurrent requests on
2018 // YUV and JPEG.
2019 if (newStream->width <= VIDEO_4K_WIDTH &&
2020 newStream->height <= VIDEO_4K_HEIGHT) {
2021 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2022 }
2023 numStreamsOnEncoder++;
2024 numYuv888OnEncoder++;
2025 largeYuv888Size.width = newStream->width;
2026 largeYuv888Size.height = newStream->height;
2027 }
2028 break;
2029 default:
2030 processedStreamCnt++;
2031 if (isOnEncoder(maxViewfinderSize, newStream->width,
2032 newStream->height)) {
2033 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 numStreamsOnEncoder++;
2035 }
2036 break;
2037 }
2038
2039 }
2040 }
2041
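    // Disable EIS for front cameras and for configurations without a video stream.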
2042 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2043 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2044 !m_bIsVideo) {
2045 m_bEisEnable = false;
2046 }
2047
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002048 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2049 pthread_mutex_unlock(&mMutex);
2050 return -EINVAL;
2051 }
2052
Thierry Strudel54dc9782017-02-15 12:12:10 -08002053 uint8_t forceEnableTnr = 0;
2054 char tnr_prop[PROPERTY_VALUE_MAX];
2055 memset(tnr_prop, 0, sizeof(tnr_prop));
2056 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2057 forceEnableTnr = (uint8_t)atoi(tnr_prop);
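    // debug.camera.tnr.forceenable bypasses the video/usecase checks below and
    // turns TNR on unconditionally.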
2058
Thierry Strudel3d639192016-09-09 11:52:26 -07002059 /* Logic to enable/disable TNR based on specific config size/etc.*/
2060 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2062 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002063 else if (forceEnableTnr)
2064 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002065
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002066 char videoHdrProp[PROPERTY_VALUE_MAX];
2067 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2068 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2069 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
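    // Video HDR is driven by the persist.camera.hdr.video property and is only
    // honored for video sessions that are not in constrained high-speed mode.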
2070
2071 if (hdr_mode_prop == 1 && m_bIsVideo &&
2072 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2073 m_bVideoHdrEnabled = true;
2074 else
2075 m_bVideoHdrEnabled = false;
2076
2077
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 /* Check if num_streams is sane */
2079 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2080 rawStreamCnt > MAX_RAW_STREAMS ||
2081 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2082 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2083 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2084 pthread_mutex_unlock(&mMutex);
2085 return -EINVAL;
2086 }
2087 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002088 if (isZsl && m_bIs4KVideo) {
2089 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 pthread_mutex_unlock(&mMutex);
2091 return -EINVAL;
2092 }
2093 /* Check if stream sizes are sane */
2094 if (numStreamsOnEncoder > 2) {
2095 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2096 pthread_mutex_unlock(&mMutex);
2097 return -EINVAL;
2098 } else if (1 < numStreamsOnEncoder){
2099 bUseCommonFeatureMask = true;
2100 LOGH("Multiple streams above max viewfinder size, common mask needed");
2101 }
2102
2103 /* Check if BLOB size is greater than 4k in 4k recording case */
2104 if (m_bIs4KVideo && bJpegExceeds4K) {
2105 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2106 pthread_mutex_unlock(&mMutex);
2107 return -EINVAL;
2108 }
2109
Emilian Peev7650c122017-01-19 08:24:33 -08002110 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2111 depthPresent) {
2112 LOGE("HAL doesn't support depth streams in HFR mode!");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 }
2116
Thierry Strudel3d639192016-09-09 11:52:26 -07002117 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2118 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2119 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2120 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2121 // configurations:
2122 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2123 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2124 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2125 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2126 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2127 __func__);
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131
2132 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2133 // the YUV stream's size is greater or equal to the JPEG size, set common
2134 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2135 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2136 jpegSize.width, jpegSize.height) &&
2137 largeYuv888Size.width > jpegSize.width &&
2138 largeYuv888Size.height > jpegSize.height) {
2139 bYuv888OverrideJpeg = true;
2140 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2141 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2142 }
2143
2144 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2145 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2146 commonFeatureMask);
2147 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2148 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2149
2150 rc = validateStreamDimensions(streamList);
2151 if (rc == NO_ERROR) {
2152 rc = validateStreamRotations(streamList);
2153 }
2154 if (rc != NO_ERROR) {
2155 LOGE("Invalid stream configuration requested!");
2156 pthread_mutex_unlock(&mMutex);
2157 return rc;
2158 }
2159
Emilian Peev0f3c3162017-03-15 12:57:46 +00002160 if (1 < pdStatCount) {
2161 LOGE("HAL doesn't support multiple PD streams");
2162 pthread_mutex_unlock(&mMutex);
2163 return -EINVAL;
2164 }
2165
2166 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2167 (1 == pdStatCount)) {
2168 LOGE("HAL doesn't support PD streams in HFR mode!");
2169 pthread_mutex_unlock(&mMutex);
2170 return -EINVAL;
2171 }
2172
Thierry Strudel3d639192016-09-09 11:52:26 -07002173 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2174 for (size_t i = 0; i < streamList->num_streams; i++) {
2175 camera3_stream_t *newStream = streamList->streams[i];
2176 LOGH("newStream type = %d, stream format = %d "
2177 "stream size : %d x %d, stream rotation = %d",
2178 newStream->stream_type, newStream->format,
2179 newStream->width, newStream->height, newStream->rotation);
2180 //if the stream is already in mStreamInfo, validate it
2181 bool stream_exists = false;
2182 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2183 it != mStreamInfo.end(); it++) {
2184 if ((*it)->stream == newStream) {
2185 QCamera3ProcessingChannel *channel =
2186 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2187 stream_exists = true;
2188 if (channel)
2189 delete channel;
2190 (*it)->status = VALID;
2191 (*it)->stream->priv = NULL;
2192 (*it)->channel = NULL;
2193 }
2194 }
2195 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2196 //new stream
2197 stream_info_t* stream_info;
2198 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2199 if (!stream_info) {
2200 LOGE("Could not allocate stream info");
2201 rc = -ENOMEM;
2202 pthread_mutex_unlock(&mMutex);
2203 return rc;
2204 }
2205 stream_info->stream = newStream;
2206 stream_info->status = VALID;
2207 stream_info->channel = NULL;
2208 mStreamInfo.push_back(stream_info);
2209 }
2210 /* Covers Opaque ZSL and API1 F/W ZSL */
2211 if (IS_USAGE_ZSL(newStream->usage)
2212 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2213 if (zslStream != NULL) {
2214 LOGE("Multiple input/reprocess streams requested!");
2215 pthread_mutex_unlock(&mMutex);
2216 return BAD_VALUE;
2217 }
2218 zslStream = newStream;
2219 }
2220 /* Covers YUV reprocess */
2221 if (inputStream != NULL) {
2222 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2223 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2224 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2225 && inputStream->width == newStream->width
2226 && inputStream->height == newStream->height) {
2227 if (zslStream != NULL) {
2228 /* This scenario indicates that multiple YUV streams with the same
2229 * size as the input stream have been requested. Since the zsl stream
2230 * handle is solely used to override the size of streams that share
2231 * h/w streams, we just make a guess here as to which stream is the
2232 * ZSL stream. This will be refactored once we have generic logic
2233 * for streams sharing encoder output.
2234 */
2235 LOGH("Warning, Multiple ip/reprocess streams requested!");
2236 }
2237 zslStream = newStream;
2238 }
2239 }
2240 }
2241
2242 /* If a zsl stream is set, we know that we have configured at least one input or
2243 bidirectional stream */
2244 if (NULL != zslStream) {
2245 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2246 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2247 mInputStreamInfo.format = zslStream->format;
2248 mInputStreamInfo.usage = zslStream->usage;
2249 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2250 mInputStreamInfo.dim.width,
2251 mInputStreamInfo.dim.height,
2252 mInputStreamInfo.format, mInputStreamInfo.usage);
2253 }
2254
2255 cleanAndSortStreamInfo();
2256 if (mMetadataChannel) {
2257 delete mMetadataChannel;
2258 mMetadataChannel = NULL;
2259 }
2260 if (mSupportChannel) {
2261 delete mSupportChannel;
2262 mSupportChannel = NULL;
2263 }
2264
2265 if (mAnalysisChannel) {
2266 delete mAnalysisChannel;
2267 mAnalysisChannel = NULL;
2268 }
2269
2270 if (mDummyBatchChannel) {
2271 delete mDummyBatchChannel;
2272 mDummyBatchChannel = NULL;
2273 }
2274
Emilian Peev7650c122017-01-19 08:24:33 -08002275 if (mDepthChannel) {
2276 mDepthChannel = NULL;
2277 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002278 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002279
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002280 mShutterDispatcher.clear();
2281 mOutputBufferDispatcher.clear();
2282
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 char is_type_value[PROPERTY_VALUE_MAX];
2284 property_get("persist.camera.is_type", is_type_value, "4");
2285 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
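    // persist.camera.is_type selects the image stabilization type; here it is
    // only checked to see whether EIS 3.0 has been requested.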
2286
Binhao Line406f062017-05-03 14:39:44 -07002287 char property_value[PROPERTY_VALUE_MAX];
2288 property_get("persist.camera.gzoom.at", property_value, "0");
2289 int goog_zoom_at = atoi(property_value);
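    // persist.camera.gzoom.at is treated as a bit mask: bit 0 enables Google zoom
    // on the video stream and bit 1 on the preview stream; both are honored only
    // on the back camera.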
Jason Leec4cf5032017-05-24 18:31:41 -07002290 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2291 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2292 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2293 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002294
2295 property_get("persist.camera.gzoom.4k", property_value, "0");
2296 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2297
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 //Create metadata channel and initialize it
2299 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2300 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2301 gCamCapability[mCameraId]->color_arrangement);
2302 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2303 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002304 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 if (mMetadataChannel == NULL) {
2306 LOGE("failed to allocate metadata channel");
2307 rc = -ENOMEM;
2308 pthread_mutex_unlock(&mMutex);
2309 return rc;
2310 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002311 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002312 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2313 if (rc < 0) {
2314 LOGE("metadata channel initialization failed");
2315 delete mMetadataChannel;
2316 mMetadataChannel = NULL;
2317 pthread_mutex_unlock(&mMutex);
2318 return rc;
2319 }
2320
Thierry Strudel2896d122017-02-23 19:18:03 -08002321 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002323 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002324 // Keep track of preview/video streams indices.
2325 // There could be more than one preview streams, but only one video stream.
2326 int32_t video_stream_idx = -1;
2327 int32_t preview_stream_idx[streamList->num_streams];
2328 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002329 bool previewTnr[streamList->num_streams];
2330 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2331 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2332 // Loop through once to determine preview TNR conditions before creating channels.
2333 for (size_t i = 0; i < streamList->num_streams; i++) {
2334 camera3_stream_t *newStream = streamList->streams[i];
2335 uint32_t stream_usage = newStream->usage;
2336 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2337 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2338 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2339 video_stream_idx = (int32_t)i;
2340 else
2341 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2342 }
2343 }
2344 // By default, preview stream TNR is disabled.
2345 // Enable TNR to the preview stream if all conditions below are satisfied:
2346 // 1. preview resolution == video resolution.
2347 // 2. video stream TNR is enabled.
2348 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2349 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2350 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2351 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2352 if (m_bTnrEnabled && m_bTnrVideo &&
2353 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2354 video_stream->width == preview_stream->width &&
2355 video_stream->height == preview_stream->height) {
2356 previewTnr[preview_stream_idx[i]] = true;
2357 }
2358 }
2359
Thierry Strudel3d639192016-09-09 11:52:26 -07002360 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2361 /* Allocate channel objects for the requested streams */
2362 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002363
Thierry Strudel3d639192016-09-09 11:52:26 -07002364 camera3_stream_t *newStream = streamList->streams[i];
2365 uint32_t stream_usage = newStream->usage;
2366 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2367 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2368 struct camera_info *p_info = NULL;
2369 pthread_mutex_lock(&gCamLock);
2370 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2371 pthread_mutex_unlock(&gCamLock);
2372 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2373 || IS_USAGE_ZSL(newStream->usage)) &&
2374 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002375 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002376 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
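            // Pick the ZSL postprocess mask based on whether this stream shares the
            // encoder path with other streams above the max viewfinder size.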
Thierry Strudel2896d122017-02-23 19:18:03 -08002377 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2378 if (bUseCommonFeatureMask)
2379 zsl_ppmask = commonFeatureMask;
2380 else
2381 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002382 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002383 if (numStreamsOnEncoder > 0)
2384 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2385 else
2386 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 LOGH("Input stream configured, reprocess config");
2392 } else {
2393 //for non zsl streams find out the format
2394 switch (newStream->format) {
2395 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2396 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002397 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2399 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2400 /* add additional features to pp feature mask */
2401 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2402 mStreamConfigInfo.num_streams);
2403
2404 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2405 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2406 CAM_STREAM_TYPE_VIDEO;
2407 if (m_bTnrEnabled && m_bTnrVideo) {
2408 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2409 CAM_QCOM_FEATURE_CPP_TNR;
2410 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2412 ~CAM_QCOM_FEATURE_CDS;
2413 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2416 CAM_QTI_FEATURE_PPEISCORE;
2417 }
Binhao Line406f062017-05-03 14:39:44 -07002418 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2419 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2420 CAM_QCOM_FEATURE_GOOG_ZOOM;
2421 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002422 } else {
2423 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2424 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002425 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2427 CAM_QCOM_FEATURE_CPP_TNR;
2428 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2430 ~CAM_QCOM_FEATURE_CDS;
2431 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002432 if(!m_bSwTnrPreview) {
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2434 ~CAM_QTI_FEATURE_SW_TNR;
2435 }
Binhao Line406f062017-05-03 14:39:44 -07002436 if (is_goog_zoom_preview_enabled) {
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2438 CAM_QCOM_FEATURE_GOOG_ZOOM;
2439 }
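                    // Preview buffers are typically consumed by the display, so apply
                    // the surface stride padding and pad the height to a multiple of 2.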
Thierry Strudel3d639192016-09-09 11:52:26 -07002440 padding_info.width_padding = mSurfaceStridePadding;
2441 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002442 previewSize.width = (int32_t)newStream->width;
2443 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002444 }
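                // For 90/270 degree stream rotation, configure the backend stream
                // with swapped width and height.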
2445 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2446 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2447 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2448 newStream->height;
2449 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2450 newStream->width;
2451 }
2452 }
2453 break;
2454 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002455 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002456 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2457 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2458 if (bUseCommonFeatureMask)
2459 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2460 commonFeatureMask;
2461 else
2462 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2463 CAM_QCOM_FEATURE_NONE;
2464 } else {
2465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2466 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2467 }
2468 break;
2469 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002470 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2472 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2473 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2474 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2475 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002476 /* Remove rotation if it is not supported
2477 for 4K LiveVideo snapshot case (online processing) */
2478 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2479 CAM_QCOM_FEATURE_ROTATION)) {
2480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2481 &= ~CAM_QCOM_FEATURE_ROTATION;
2482 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 } else {
2484 if (bUseCommonFeatureMask &&
2485 isOnEncoder(maxViewfinderSize, newStream->width,
2486 newStream->height)) {
2487 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2488 } else {
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2490 }
2491 }
2492 if (isZsl) {
2493 if (zslStream) {
2494 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2495 (int32_t)zslStream->width;
2496 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2497 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 } else {
2501 LOGE("Error, No ZSL stream identified");
2502 pthread_mutex_unlock(&mMutex);
2503 return -EINVAL;
2504 }
2505 } else if (m_bIs4KVideo) {
2506 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2507 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2508 } else if (bYuv888OverrideJpeg) {
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2510 (int32_t)largeYuv888Size.width;
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2512 (int32_t)largeYuv888Size.height;
2513 }
2514 break;
2515 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2516 case HAL_PIXEL_FORMAT_RAW16:
2517 case HAL_PIXEL_FORMAT_RAW10:
2518 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2520 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002521 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2522 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2523 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2524 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2525 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2526 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2527 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2528 gCamCapability[mCameraId]->dt[mPDIndex];
2529 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2530 gCamCapability[mCameraId]->vc[mPDIndex];
2531 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002532 break;
2533 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002534 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002535 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2536 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2537 break;
2538 }
2539 }
2540
2541 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2542 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2543 gCamCapability[mCameraId]->color_arrangement);
2544
2545 if (newStream->priv == NULL) {
2546 //New stream, construct channel
2547 switch (newStream->stream_type) {
2548 case CAMERA3_STREAM_INPUT:
2549 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2550 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2551 break;
2552 case CAMERA3_STREAM_BIDIRECTIONAL:
2553 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2554 GRALLOC_USAGE_HW_CAMERA_WRITE;
2555 break;
2556 case CAMERA3_STREAM_OUTPUT:
2557 /* For video encoding stream, set read/write rarely
2558 * flag so that they may be set to un-cached */
2559 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2560 newStream->usage |=
2561 (GRALLOC_USAGE_SW_READ_RARELY |
2562 GRALLOC_USAGE_SW_WRITE_RARELY |
2563 GRALLOC_USAGE_HW_CAMERA_WRITE);
2564 else if (IS_USAGE_ZSL(newStream->usage))
2565 {
2566 LOGD("ZSL usage flag skipping");
2567 }
2568 else if (newStream == zslStream
2569 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2570 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2571 } else
2572 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2573 break;
2574 default:
2575 LOGE("Invalid stream_type %d", newStream->stream_type);
2576 break;
2577 }
2578
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002579 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002580 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2581 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2582 QCamera3ProcessingChannel *channel = NULL;
2583 switch (newStream->format) {
2584 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2585 if ((newStream->usage &
2586 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2587 (streamList->operation_mode ==
2588 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2589 ) {
2590 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2591 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002592 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 this,
2594 newStream,
2595 (cam_stream_type_t)
2596 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2597 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2598 mMetadataChannel,
2599 0); //heap buffers are not required for HFR video channel
2600 if (channel == NULL) {
2601 LOGE("allocation of channel failed");
2602 pthread_mutex_unlock(&mMutex);
2603 return -ENOMEM;
2604 }
2605 //channel->getNumBuffers() will return 0 here so use
2606 //MAX_INFLIGHT_HFR_REQUESTS
2607 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2608 newStream->priv = channel;
2609 LOGI("num video buffers in HFR mode: %d",
2610 MAX_INFLIGHT_HFR_REQUESTS);
2611 } else {
2612 /* Copy stream contents in HFR preview only case to create
2613 * dummy batch channel so that sensor streaming is in
2614 * HFR mode */
2615 if (!m_bIsVideo && (streamList->operation_mode ==
2616 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2617 mDummyBatchStream = *newStream;
2618 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002619 int bufferCount = MAX_INFLIGHT_REQUESTS;
2620 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2621 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002622 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2623 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2624 bufferCount = m_bIs4KVideo ?
2625 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2626 }
2627
Thierry Strudel2896d122017-02-23 19:18:03 -08002628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002638 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002644                    /* Disable UBWC for preview, even though it is supported,
2645                     * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002646 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002647 (previewSize.width == (int32_t)videoWidth)&&
2648 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002649 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002650 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002651 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002652 /* When goog_zoom is linked to the preview or video stream,
2653                     * disable UBWC on the linked stream */
2654 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2655 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2656 channel->setUBWCEnabled(false);
2657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002658 newStream->max_buffers = channel->getNumBuffers();
2659 newStream->priv = channel;
2660 }
2661 break;
2662 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2663 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2664 mChannelHandle,
2665 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002666 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002667 this,
2668 newStream,
2669 (cam_stream_type_t)
2670 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2671 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2672 mMetadataChannel);
2673 if (channel == NULL) {
2674 LOGE("allocation of YUV channel failed");
2675 pthread_mutex_unlock(&mMutex);
2676 return -ENOMEM;
2677 }
2678 newStream->max_buffers = channel->getNumBuffers();
2679 newStream->priv = channel;
2680 break;
2681 }
2682 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2683 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002684 case HAL_PIXEL_FORMAT_RAW10: {
2685 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2686 (HAL_DATASPACE_DEPTH != newStream->data_space))
2687 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002688 mRawChannel = new QCamera3RawChannel(
2689 mCameraHandle->camera_handle, mChannelHandle,
2690 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002691 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002692 this, newStream,
2693 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002694 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002695 if (mRawChannel == NULL) {
2696 LOGE("allocation of raw channel failed");
2697 pthread_mutex_unlock(&mMutex);
2698 return -ENOMEM;
2699 }
2700 newStream->max_buffers = mRawChannel->getNumBuffers();
2701 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2702 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002703 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002704 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002705 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2706 mDepthChannel = new QCamera3DepthChannel(
2707 mCameraHandle->camera_handle, mChannelHandle,
2708 mCameraHandle->ops, NULL, NULL, &padding_info,
2709 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2710 mMetadataChannel);
2711 if (NULL == mDepthChannel) {
2712 LOGE("Allocation of depth channel failed");
2713 pthread_mutex_unlock(&mMutex);
2714 return NO_MEMORY;
2715 }
2716 newStream->priv = mDepthChannel;
2717 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2718 } else {
2719 // Max live snapshot inflight buffer is 1. This is to mitigate
2720 // frame drop issues for video snapshot. The more buffers being
2721 // allocated, the more frame drops there are.
2722 mPictureChannel = new QCamera3PicChannel(
2723 mCameraHandle->camera_handle, mChannelHandle,
2724 mCameraHandle->ops, captureResultCb,
2725 setBufferErrorStatus, &padding_info, this, newStream,
2726 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2727 m_bIs4KVideo, isZsl, mMetadataChannel,
2728 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2729 if (mPictureChannel == NULL) {
2730 LOGE("allocation of channel failed");
2731 pthread_mutex_unlock(&mMutex);
2732 return -ENOMEM;
2733 }
2734 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2735 newStream->max_buffers = mPictureChannel->getNumBuffers();
2736 mPictureChannel->overrideYuvSize(
2737 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2738 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002739 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002740 break;
2741
2742 default:
2743 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002744 pthread_mutex_unlock(&mMutex);
2745 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002746 }
2747 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2748 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2749 } else {
2750 LOGE("Error, Unknown stream type");
2751 pthread_mutex_unlock(&mMutex);
2752 return -EINVAL;
2753 }
2754
2755 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002756 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002757 // Here we only care whether it's EIS3 or not
2758 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2759 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2760 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2761 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002762 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002763 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002764 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002765 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2766 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2767 }
2768 }
2769
2770 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2771 it != mStreamInfo.end(); it++) {
2772 if ((*it)->stream == newStream) {
2773 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2774 break;
2775 }
2776 }
2777 } else {
2778 // Channel already exists for this stream
2779 // Do nothing for now
2780 }
2781 padding_info = gCamCapability[mCameraId]->padding_info;
2782
Emilian Peev7650c122017-01-19 08:24:33 -08002783        /* Do not add entries for the input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002784         * since there is no real stream associated with them
2785         */
Emilian Peev7650c122017-01-19 08:24:33 -08002786 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002787 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2788 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002789 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002790 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 }
2792
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002793 // Let buffer dispatcher know the configured streams.
2794 mOutputBufferDispatcher.configureStreams(streamList);
2795
Thierry Strudel2896d122017-02-23 19:18:03 -08002796 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2797 onlyRaw = false;
2798 }
2799
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002800 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002801 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002802 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002803 cam_analysis_info_t analysisInfo;
2804 int32_t ret = NO_ERROR;
2805 ret = mCommon.getAnalysisInfo(
2806 FALSE,
2807 analysisFeatureMask,
2808 &analysisInfo);
2809 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002810 cam_color_filter_arrangement_t analysis_color_arrangement =
2811 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2812 CAM_FILTER_ARRANGEMENT_Y :
2813 gCamCapability[mCameraId]->color_arrangement);
2814 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2815 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002816 cam_dimension_t analysisDim;
2817 analysisDim = mCommon.getMatchingDimension(previewSize,
2818 analysisInfo.analysis_recommended_res);
2819
2820 mAnalysisChannel = new QCamera3SupportChannel(
2821 mCameraHandle->camera_handle,
2822 mChannelHandle,
2823 mCameraHandle->ops,
2824 &analysisInfo.analysis_padding_info,
2825 analysisFeatureMask,
2826 CAM_STREAM_TYPE_ANALYSIS,
2827 &analysisDim,
2828 (analysisInfo.analysis_format
2829 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2830 : CAM_FORMAT_YUV_420_NV21),
2831 analysisInfo.hw_analysis_supported,
2832 gCamCapability[mCameraId]->color_arrangement,
2833 this,
2834 0); // force buffer count to 0
2835 } else {
2836 LOGW("getAnalysisInfo failed, ret = %d", ret);
2837 }
2838 if (!mAnalysisChannel) {
2839 LOGW("Analysis channel cannot be created");
2840 }
2841 }
2842
Thierry Strudel3d639192016-09-09 11:52:26 -07002843 //RAW DUMP channel
2844 if (mEnableRawDump && isRawStreamRequested == false){
2845 cam_dimension_t rawDumpSize;
2846 rawDumpSize = getMaxRawSize(mCameraId);
2847 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2848 setPAAFSupport(rawDumpFeatureMask,
2849 CAM_STREAM_TYPE_RAW,
2850 gCamCapability[mCameraId]->color_arrangement);
2851 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2852 mChannelHandle,
2853 mCameraHandle->ops,
2854 rawDumpSize,
2855 &padding_info,
2856 this, rawDumpFeatureMask);
2857 if (!mRawDumpChannel) {
2858 LOGE("Raw Dump channel cannot be created");
2859 pthread_mutex_unlock(&mMutex);
2860 return -ENOMEM;
2861 }
2862 }
2863
Thierry Strudel3d639192016-09-09 11:52:26 -07002864 if (mAnalysisChannel) {
2865 cam_analysis_info_t analysisInfo;
2866 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2867 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2868 CAM_STREAM_TYPE_ANALYSIS;
2869 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2870 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002871 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002872 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2873 &analysisInfo);
2874 if (rc != NO_ERROR) {
2875 LOGE("getAnalysisInfo failed, ret = %d", rc);
2876 pthread_mutex_unlock(&mMutex);
2877 return rc;
2878 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002879 cam_color_filter_arrangement_t analysis_color_arrangement =
2880 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2881 CAM_FILTER_ARRANGEMENT_Y :
2882 gCamCapability[mCameraId]->color_arrangement);
2883 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2884 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2885 analysis_color_arrangement);
2886
Thierry Strudel3d639192016-09-09 11:52:26 -07002887 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002888 mCommon.getMatchingDimension(previewSize,
2889 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002890 mStreamConfigInfo.num_streams++;
2891 }
2892
Thierry Strudel2896d122017-02-23 19:18:03 -08002893 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002894 cam_analysis_info_t supportInfo;
2895 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2896 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2897 setPAAFSupport(callbackFeatureMask,
2898 CAM_STREAM_TYPE_CALLBACK,
2899 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002900 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002901 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002902 if (ret != NO_ERROR) {
2903 /* Ignore the error for Mono camera
2904 * because the PAAF bit mask is only set
2905 * for CAM_STREAM_TYPE_ANALYSIS stream type
2906 */
2907 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2908 LOGW("getAnalysisInfo failed, ret = %d", ret);
2909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002910 }
2911 mSupportChannel = new QCamera3SupportChannel(
2912 mCameraHandle->camera_handle,
2913 mChannelHandle,
2914 mCameraHandle->ops,
2915 &gCamCapability[mCameraId]->padding_info,
2916 callbackFeatureMask,
2917 CAM_STREAM_TYPE_CALLBACK,
2918 &QCamera3SupportChannel::kDim,
2919 CAM_FORMAT_YUV_420_NV21,
2920 supportInfo.hw_analysis_supported,
2921 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002922 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002923 if (!mSupportChannel) {
2924 LOGE("dummy channel cannot be created");
2925 pthread_mutex_unlock(&mMutex);
2926 return -ENOMEM;
2927 }
2928 }
2929
2930 if (mSupportChannel) {
2931 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2932 QCamera3SupportChannel::kDim;
2933 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2934 CAM_STREAM_TYPE_CALLBACK;
2935 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2936 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2937 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2938 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2939 gCamCapability[mCameraId]->color_arrangement);
2940 mStreamConfigInfo.num_streams++;
2941 }
2942
2943 if (mRawDumpChannel) {
2944 cam_dimension_t rawSize;
2945 rawSize = getMaxRawSize(mCameraId);
2946 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2947 rawSize;
2948 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2949 CAM_STREAM_TYPE_RAW;
2950 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2951 CAM_QCOM_FEATURE_NONE;
2952 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2953 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2954 gCamCapability[mCameraId]->color_arrangement);
2955 mStreamConfigInfo.num_streams++;
2956 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002957
2958 if (mHdrPlusRawSrcChannel) {
2959 cam_dimension_t rawSize;
2960 rawSize = getMaxRawSize(mCameraId);
2961 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2962 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2963 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2964 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2965 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2966 gCamCapability[mCameraId]->color_arrangement);
2967 mStreamConfigInfo.num_streams++;
2968 }
2969
Thierry Strudel3d639192016-09-09 11:52:26 -07002970    /* In HFR mode, if no video stream is added, create a dummy channel so that
2971     * the ISP can use batch mode even for the preview-only case. This channel is
2972     * never started (no stream-on); it is only initialized */
2973 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2974 !m_bIsVideo) {
2975 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(dummyFeatureMask,
2977 CAM_STREAM_TYPE_VIDEO,
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2980 mChannelHandle,
2981 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002982 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002983 this,
2984 &mDummyBatchStream,
2985 CAM_STREAM_TYPE_VIDEO,
2986 dummyFeatureMask,
2987 mMetadataChannel);
2988 if (NULL == mDummyBatchChannel) {
2989            LOGE("creation of mDummyBatchChannel failed. "
2990                    "Preview will use non-HFR sensor mode");
2991 }
2992 }
2993 if (mDummyBatchChannel) {
2994 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2995 mDummyBatchStream.width;
2996 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2997 mDummyBatchStream.height;
2998 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2999 CAM_STREAM_TYPE_VIDEO;
3000 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3001 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3002 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3003 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3004 gCamCapability[mCameraId]->color_arrangement);
3005 mStreamConfigInfo.num_streams++;
3006 }
3007
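    /* Reading of the nested ternary below (descriptive note): 4K video leaves
     * max_buffers at 0, EIS3-enabled video advertises MAX_VIDEO_BUFFERS, and every
     * other configuration falls back to MAX_INFLIGHT_REQUESTS. */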
3008 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3009 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003010 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003011 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003012
3013 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3014 for (pendingRequestIterator i = mPendingRequestsList.begin();
3015 i != mPendingRequestsList.end();) {
3016 i = erasePendingRequest(i);
3017 }
3018 mPendingFrameDropList.clear();
3019 // Initialize/Reset the pending buffers list
3020 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3021 req.mPendingBufferList.clear();
3022 }
3023 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3024
Thierry Strudel3d639192016-09-09 11:52:26 -07003025 mCurJpegMeta.clear();
3026    //Get min frame duration for this stream configuration
3027 deriveMinFrameDuration();
3028
Chien-Yu Chenee335912017-02-09 17:53:20 -08003029 mFirstPreviewIntentSeen = false;
3030
3031    // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003032 {
3033 Mutex::Autolock l(gHdrPlusClientLock);
3034 disableHdrPlusModeLocked();
3035 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003036
Thierry Strudel3d639192016-09-09 11:52:26 -07003037 // Update state
3038 mState = CONFIGURED;
3039
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003040 mFirstMetadataCallback = true;
3041
Thierry Strudel3d639192016-09-09 11:52:26 -07003042 pthread_mutex_unlock(&mMutex);
3043
3044 return rc;
3045}
3046
3047/*===========================================================================
3048 * FUNCTION : validateCaptureRequest
3049 *
3050 * DESCRIPTION: validate a capture request from camera service
3051 *
3052 * PARAMETERS :
3053 * @request : request from framework to process
3054 *
3055 * RETURN : NO_ERROR if the request is valid, BAD_VALUE otherwise
3056 *
3057 *==========================================================================*/
3058int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003059 camera3_capture_request_t *request,
3060 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003061{
3062 ssize_t idx = 0;
3063 const camera3_stream_buffer_t *b;
3064 CameraMetadata meta;
3065
3066 /* Sanity check the request */
3067 if (request == NULL) {
3068 LOGE("NULL capture request");
3069 return BAD_VALUE;
3070 }
3071
3072 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3073 /*settings cannot be null for the first request*/
3074 return BAD_VALUE;
3075 }
3076
3077 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003078 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3079 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003080 LOGE("Request %d: No output buffers provided!",
3081                frameNumber);
3082 return BAD_VALUE;
3083 }
3084 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3085        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3086 request->num_output_buffers, MAX_NUM_STREAMS);
3087 return BAD_VALUE;
3088 }
3089 if (request->input_buffer != NULL) {
3090 b = request->input_buffer;
3091 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3092 LOGE("Request %d: Buffer %ld: Status not OK!",
3093 frameNumber, (long)idx);
3094 return BAD_VALUE;
3095 }
3096 if (b->release_fence != -1) {
3097 LOGE("Request %d: Buffer %ld: Has a release fence!",
3098 frameNumber, (long)idx);
3099 return BAD_VALUE;
3100 }
3101 if (b->buffer == NULL) {
3102 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3103 frameNumber, (long)idx);
3104 return BAD_VALUE;
3105 }
3106 }
3107
3108 // Validate all buffers
3109 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003110 if (b == NULL) {
3111 return BAD_VALUE;
3112 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003114 QCamera3ProcessingChannel *channel =
3115 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3116 if (channel == NULL) {
3117 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3118 frameNumber, (long)idx);
3119 return BAD_VALUE;
3120 }
3121 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3122 LOGE("Request %d: Buffer %ld: Status not OK!",
3123 frameNumber, (long)idx);
3124 return BAD_VALUE;
3125 }
3126 if (b->release_fence != -1) {
3127 LOGE("Request %d: Buffer %ld: Has a release fence!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->buffer == NULL) {
3132 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (*(b->buffer) == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL private handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 idx++;
3142 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003143 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003144 return NO_ERROR;
3145}
3146
3147/*===========================================================================
3148 * FUNCTION : deriveMinFrameDuration
3149 *
3150 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3151 * on currently configured streams.
3152 *
3153 * PARAMETERS : NONE
3154 *
3155 * RETURN : NONE
3156 *
3157 *==========================================================================*/
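// Illustrative walk-through with hypothetical sizes (not taken from any real
// capability table): with a 1920x1080 preview and a 4032x3024 JPEG configured,
// maxJpegDim (12192768) exceeds maxProcessedDim (2073600), so the JPEG dimension is
// folded into the processed dimension and both mMinProcessedFrameDuration and
// mMinJpegFrameDuration come from the picture_min_duration[] entry matching
// 4032x3024. mMinRawFrameDuration is only derived when a RAW format stream is
// actually configured.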
3158void QCamera3HardwareInterface::deriveMinFrameDuration()
3159{
3160 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003161 bool hasRaw = false;
3162
3163 mMinRawFrameDuration = 0;
3164 mMinJpegFrameDuration = 0;
3165 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003166
3167 maxJpegDim = 0;
3168 maxProcessedDim = 0;
3169 maxRawDim = 0;
3170
3171 // Figure out maximum jpeg, processed, and raw dimensions
3172 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3173 it != mStreamInfo.end(); it++) {
3174
3175 // Input stream doesn't have valid stream_type
3176 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3177 continue;
3178
3179 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3180 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3181 if (dimension > maxJpegDim)
3182 maxJpegDim = dimension;
3183 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3184 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3185 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003186 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003187 if (dimension > maxRawDim)
3188 maxRawDim = dimension;
3189 } else {
3190 if (dimension > maxProcessedDim)
3191 maxProcessedDim = dimension;
3192 }
3193 }
3194
3195 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3196 MAX_SIZES_CNT);
3197
3198 //Assume all jpeg dimensions are in processed dimensions.
3199 if (maxJpegDim > maxProcessedDim)
3200 maxProcessedDim = maxJpegDim;
3201    //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003202 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003203 maxRawDim = INT32_MAX;
3204
3205 for (size_t i = 0; i < count; i++) {
3206 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3207 gCamCapability[mCameraId]->raw_dim[i].height;
3208 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3209 maxRawDim = dimension;
3210 }
3211 }
3212
3213 //Find minimum durations for processed, jpeg, and raw
3214 for (size_t i = 0; i < count; i++) {
3215 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3216 gCamCapability[mCameraId]->raw_dim[i].height) {
3217 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3218 break;
3219 }
3220 }
3221 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3222 for (size_t i = 0; i < count; i++) {
3223 if (maxProcessedDim ==
3224 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3225 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3226 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3227 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3228 break;
3229 }
3230 }
3231}
3232
3233/*===========================================================================
3234 * FUNCTION : getMinFrameDuration
3235 *
3236 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3237 * derived for the currently configured streams and the streams in this request.
3238 *
3239 * PARAMETERS : @request: request sent by the framework
3240 *
3241 * RETURN : min frame duration for a particular request
3242 *
3243 *==========================================================================*/
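// Hypothetical usage sketch: a request carrying only a preview buffer returns
// MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once a BLOB (JPEG) buffer
// is part of the request, mMinJpegFrameDuration is folded into the MAX as well, so
// the reported minimum duration can only grow.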
3244int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3245{
3246 bool hasJpegStream = false;
3247 bool hasRawStream = false;
3248 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3249 const camera3_stream_t *stream = request->output_buffers[i].stream;
3250 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3251 hasJpegStream = true;
3252 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3253 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3254 stream->format == HAL_PIXEL_FORMAT_RAW16)
3255 hasRawStream = true;
3256 }
3257
3258 if (!hasJpegStream)
3259 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3260 else
3261 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3262}
3263
3264/*===========================================================================
3265 * FUNCTION : handleBuffersDuringFlushLock
3266 *
3267 * DESCRIPTION: Account for buffers returned from back-end during flush
3268 * This function is executed while mMutex is held by the caller.
3269 *
3270 * PARAMETERS :
3271 * @buffer: image buffer for the callback
3272 *
3273 * RETURN :
3274 *==========================================================================*/
3275void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3276{
3277 bool buffer_found = false;
3278 for (List<PendingBuffersInRequest>::iterator req =
3279 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3280 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3281 for (List<PendingBufferInfo>::iterator i =
3282 req->mPendingBufferList.begin();
3283 i != req->mPendingBufferList.end(); i++) {
3284 if (i->buffer == buffer->buffer) {
3285 mPendingBuffersMap.numPendingBufsAtFlush--;
3286 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3287 buffer->buffer, req->frame_number,
3288 mPendingBuffersMap.numPendingBufsAtFlush);
3289 buffer_found = true;
3290 break;
3291 }
3292 }
3293 if (buffer_found) {
3294 break;
3295 }
3296 }
3297 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3298 //signal the flush()
3299 LOGD("All buffers returned to HAL. Continue flush");
3300 pthread_cond_signal(&mBuffersCond);
3301 }
3302}
3303
Thierry Strudel3d639192016-09-09 11:52:26 -07003304/*===========================================================================
3305 * FUNCTION : handleBatchMetadata
3306 *
3307 * DESCRIPTION: Handles metadata buffer callback in batch mode
3308 *
3309 * PARAMETERS : @metadata_buf: metadata buffer
3310 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3311 * the meta buf in this method
3312 *
3313 * RETURN :
3314 *
3315 *==========================================================================*/
3316void QCamera3HardwareInterface::handleBatchMetadata(
3317 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3318{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003319 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003320
3321 if (NULL == metadata_buf) {
3322 LOGE("metadata_buf is NULL");
3323 return;
3324 }
3325    /* In batch mode, the metadata will contain the frame number and timestamp of
3326     * the last frame in the batch. E.g. a batch containing buffers from requests
3327     * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3328     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3329     * multiple process_capture_results */
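    /* Worked example (hypothetical 120fps HFR batch of 4): if this metadata reports
     * last_frame_number = 8 and frameNumDiff works out to 4, the loop below re-emits
     * metadata for inferred frame numbers 5, 6, 7 and 8, patching
     * CAM_INTF_META_FRAME_NUMBER on each iteration. */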
3330 metadata_buffer_t *metadata =
3331 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3332 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3333 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3334 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3335 uint32_t frame_number = 0, urgent_frame_number = 0;
3336 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3337 bool invalid_metadata = false;
3338 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3339 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003340 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003341
3342 int32_t *p_frame_number_valid =
3343 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3344 uint32_t *p_frame_number =
3345 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3346 int64_t *p_capture_time =
3347 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3348 int32_t *p_urgent_frame_number_valid =
3349 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3350 uint32_t *p_urgent_frame_number =
3351 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3352
3353 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3354 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3355 (NULL == p_urgent_frame_number)) {
3356 LOGE("Invalid metadata");
3357 invalid_metadata = true;
3358 } else {
3359 frame_number_valid = *p_frame_number_valid;
3360 last_frame_number = *p_frame_number;
3361 last_frame_capture_time = *p_capture_time;
3362 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3363 last_urgent_frame_number = *p_urgent_frame_number;
3364 }
3365
3366    /* In batch mode, when no video buffers are requested, set_parms are sent
3367 * for every capture_request. The difference between consecutive urgent
3368 * frame numbers and frame numbers should be used to interpolate the
3369 * corresponding frame numbers and time stamps */
3370 pthread_mutex_lock(&mMutex);
3371 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003372 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3373 if(idx < 0) {
3374 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3375 last_urgent_frame_number);
3376 mState = ERROR;
3377 pthread_mutex_unlock(&mMutex);
3378 return;
3379 }
3380 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3382 first_urgent_frame_number;
3383
3384 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3385 urgent_frame_number_valid,
3386 first_urgent_frame_number, last_urgent_frame_number);
3387 }
3388
3389 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003390 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3391 if(idx < 0) {
3392 LOGE("Invalid frame number received: %d. Irrecoverable error",
3393 last_frame_number);
3394 mState = ERROR;
3395 pthread_mutex_unlock(&mMutex);
3396 return;
3397 }
3398 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003399 frameNumDiff = last_frame_number + 1 -
3400 first_frame_number;
3401 mPendingBatchMap.removeItem(last_frame_number);
3402
3403 LOGD("frm: valid: %d frm_num: %d - %d",
3404 frame_number_valid,
3405 first_frame_number, last_frame_number);
3406
3407 }
3408 pthread_mutex_unlock(&mMutex);
3409
3410 if (urgent_frame_number_valid || frame_number_valid) {
3411 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3412 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3413 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3414 urgentFrameNumDiff, last_urgent_frame_number);
3415 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3416 LOGE("frameNumDiff: %d frameNum: %d",
3417 frameNumDiff, last_frame_number);
3418 }
3419
3420 for (size_t i = 0; i < loopCount; i++) {
3421 /* handleMetadataWithLock is called even for invalid_metadata for
3422 * pipeline depth calculation */
3423 if (!invalid_metadata) {
3424 /* Infer frame number. Batch metadata contains frame number of the
3425 * last frame */
3426 if (urgent_frame_number_valid) {
3427 if (i < urgentFrameNumDiff) {
3428 urgent_frame_number =
3429 first_urgent_frame_number + i;
3430 LOGD("inferred urgent frame_number: %d",
3431 urgent_frame_number);
3432 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3433 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3434 } else {
3435 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3436 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3437 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3438 }
3439 }
3440
3441 /* Infer frame number. Batch metadata contains frame number of the
3442 * last frame */
3443 if (frame_number_valid) {
3444 if (i < frameNumDiff) {
3445 frame_number = first_frame_number + i;
3446 LOGD("inferred frame_number: %d", frame_number);
3447 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3448 CAM_INTF_META_FRAME_NUMBER, frame_number);
3449 } else {
3450 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3451 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3452 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3453 }
3454 }
3455
3456 if (last_frame_capture_time) {
3457 //Infer timestamp
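                //Continuing the hypothetical 120fps example: with loopCount = 4 and
                //last_frame_capture_time = T, first_frame_capture_time becomes
                //T - 3 * (1e9 / 120) = T - 25ms, and iteration i is stamped
                //T - 25ms + i * 8.33ms.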
3458 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003459 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003460 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003461 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3463 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3464 LOGD("batch capture_time: %lld, capture_time: %lld",
3465 last_frame_capture_time, capture_time);
3466 }
3467 }
3468 pthread_mutex_lock(&mMutex);
3469 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003470 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003471 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3472 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003473 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003474 pthread_mutex_unlock(&mMutex);
3475 }
3476
3477 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003478 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003479 mMetadataChannel->bufDone(metadata_buf);
3480 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003481 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003482 }
3483}
3484
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003485void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3486 camera3_error_msg_code_t errorCode)
3487{
3488 camera3_notify_msg_t notify_msg;
3489 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3490 notify_msg.type = CAMERA3_MSG_ERROR;
3491 notify_msg.message.error.error_code = errorCode;
3492 notify_msg.message.error.error_stream = NULL;
3493 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003494 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003495
3496 return;
3497}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003498
3499/*===========================================================================
3500 * FUNCTION : sendPartialMetadataWithLock
3501 *
3502 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3503 *
3504 * PARAMETERS : @metadata: metadata buffer
3505 * @requestIter: The iterator for the pending capture request for
3506 * which the partial result is being sent
3507 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3508 * last urgent metadata in a batch. Always true for non-batch mode
3509 *
3510 * RETURN :
3511 *
3512 *==========================================================================*/
3513
3514void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3515 metadata_buffer_t *metadata,
3516 const pendingRequestIterator requestIter,
3517 bool lastUrgentMetadataInBatch)
3518{
3519 camera3_capture_result_t result;
3520 memset(&result, 0, sizeof(camera3_capture_result_t));
3521
3522 requestIter->partial_result_cnt++;
3523
3524 // Extract 3A metadata
3525 result.result = translateCbUrgentMetadataToResultMetadata(
3526 metadata, lastUrgentMetadataInBatch);
3527 // Populate metadata result
3528 result.frame_number = requestIter->frame_number;
3529 result.num_output_buffers = 0;
3530 result.output_buffers = NULL;
3531 result.partial_result = requestIter->partial_result_cnt;
3532
3533 {
3534 Mutex::Autolock l(gHdrPlusClientLock);
3535 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3536 // Notify HDR+ client about the partial metadata.
3537 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3538 result.partial_result == PARTIAL_RESULT_COUNT);
3539 }
3540 }
3541
3542 orchestrateResult(&result);
3543 LOGD("urgent frame_number = %u", result.frame_number);
3544 free_camera_metadata((camera_metadata_t *)result.result);
3545}
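// Note: partial_result_cnt is incremented before the callback, so the framework sees
// strictly increasing partial_result values; when an HDR+ client is connected it is
// additionally told whether this partial is the final one by comparing against
// PARTIAL_RESULT_COUNT.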
3546
Thierry Strudel3d639192016-09-09 11:52:26 -07003547/*===========================================================================
3548 * FUNCTION : handleMetadataWithLock
3549 *
3550 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3551 *
3552 * PARAMETERS : @metadata_buf: metadata buffer
3553 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3554 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003555 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3556 * last urgent metadata in a batch. Always true for non-batch mode
3557 * @lastMetadataInBatch: Boolean to indicate whether this is the
3558 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003559 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3560 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003561 *
3562 * RETURN :
3563 *
3564 *==========================================================================*/
3565void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003566 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003567 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3568 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003569{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003570 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003571 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3572 //during flush do not send metadata from this thread
3573 LOGD("not sending metadata during flush or when mState is error");
3574 if (free_and_bufdone_meta_buf) {
3575 mMetadataChannel->bufDone(metadata_buf);
3576 free(metadata_buf);
3577 }
3578 return;
3579 }
3580
3581 //not in flush
3582 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3583 int32_t frame_number_valid, urgent_frame_number_valid;
3584 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003585 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003586 nsecs_t currentSysTime;
3587
3588 int32_t *p_frame_number_valid =
3589 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3590 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3591 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003592 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 int32_t *p_urgent_frame_number_valid =
3594 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3595 uint32_t *p_urgent_frame_number =
3596 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3597 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3598 metadata) {
3599 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3600 *p_frame_number_valid, *p_frame_number);
3601 }
3602
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003603 camera_metadata_t *resultMetadata = nullptr;
3604
Thierry Strudel3d639192016-09-09 11:52:26 -07003605 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3606 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3607 LOGE("Invalid metadata");
3608 if (free_and_bufdone_meta_buf) {
3609 mMetadataChannel->bufDone(metadata_buf);
3610 free(metadata_buf);
3611 }
3612 goto done_metadata;
3613 }
3614 frame_number_valid = *p_frame_number_valid;
3615 frame_number = *p_frame_number;
3616 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003617 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003618 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3619 urgent_frame_number = *p_urgent_frame_number;
3620 currentSysTime = systemTime(CLOCK_MONOTONIC);
3621
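    // Descriptive note: when the capability is not timestamp calibrated, the block
    // below estimates the BOOTTIME-to-MONOTONIC offset by sampling both clocks a few
    // times, keeping the sample with the smallest MONOTONIC gap (least scheduling
    // noise), and subtracting that offset from capture_time, effectively rebasing a
    // BOOTTIME-based sensor timestamp onto the MONOTONIC clock.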
Jason Lee603176d2017-05-31 11:43:27 -07003622 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3623 const int tries = 3;
3624 nsecs_t bestGap, measured;
3625 for (int i = 0; i < tries; ++i) {
3626 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3627 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3628 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3629 const nsecs_t gap = tmono2 - tmono;
3630 if (i == 0 || gap < bestGap) {
3631 bestGap = gap;
3632 measured = tbase - ((tmono + tmono2) >> 1);
3633 }
3634 }
3635 capture_time -= measured;
3636 }
3637
Thierry Strudel3d639192016-09-09 11:52:26 -07003638 // Detect if buffers from any requests are overdue
3639 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003640 int64_t timeout;
3641 {
3642 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3643 // If there is a pending HDR+ request, the following requests may be blocked until the
3644 // HDR+ request is done. So allow a longer timeout.
3645 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3646 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3647 }
3648
3649 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003650 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003651 assert(missed.stream->priv);
3652 if (missed.stream->priv) {
3653 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3654 assert(ch->mStreams[0]);
3655 if (ch->mStreams[0]) {
3656 LOGE("Cancel missing frame = %d, buffer = %p,"
3657 "stream type = %d, stream format = %d",
3658 req.frame_number, missed.buffer,
3659 ch->mStreams[0]->getMyType(), missed.stream->format);
3660 ch->timeoutFrame(req.frame_number);
3661 }
3662 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003663 }
3664 }
3665 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003666 //For the very first metadata callback, regardless whether it contains valid
3667 //frame number, send the partial metadata for the jumpstarting requests.
3668 //Note that this has to be done even if the metadata doesn't contain valid
3669 //urgent frame number, because in the case only 1 request is ever submitted
3670 //to HAL, there won't be subsequent valid urgent frame number.
3671 if (mFirstMetadataCallback) {
3672 for (pendingRequestIterator i =
3673 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3674 if (i->bUseFirstPartial) {
3675 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3676 }
3677 }
3678 mFirstMetadataCallback = false;
3679 }
3680
Thierry Strudel3d639192016-09-09 11:52:26 -07003681 //Partial result on process_capture_result for timestamp
3682 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003683 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003684
3685        //Received an urgent Frame Number, handle it
3686 //using partial results
3687 for (pendingRequestIterator i =
3688 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3689 LOGD("Iterator Frame = %d urgent frame = %d",
3690 i->frame_number, urgent_frame_number);
3691
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003692 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 (i->partial_result_cnt == 0)) {
3694 LOGE("Error: HAL missed urgent metadata for frame number %d",
3695 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003696 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003697 }
3698
3699 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003700 i->partial_result_cnt == 0) {
3701 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003702 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3703 // Instant AEC settled for this frame.
3704 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3705 mInstantAECSettledFrameNumber = urgent_frame_number;
3706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003707 break;
3708 }
3709 }
3710 }
3711
3712 if (!frame_number_valid) {
3713 LOGD("Not a valid normal frame number, used as SOF only");
3714 if (free_and_bufdone_meta_buf) {
3715 mMetadataChannel->bufDone(metadata_buf);
3716 free(metadata_buf);
3717 }
3718 goto done_metadata;
3719 }
3720 LOGH("valid frame_number = %u, capture_time = %lld",
3721 frame_number, capture_time);
3722
Emilian Peev4e0fe952017-06-30 12:40:09 -07003723 handleDepthDataLocked(metadata->depth_data, frame_number,
3724 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003725
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003726    // Check whether any stream buffer corresponding to this frame is dropped or not.
3727    // If dropped, send ERROR_BUFFER for the corresponding stream.
3728    // OR, if instant AEC is enabled, drop frames until AEC has settled.
3729 for (auto & pendingRequest : mPendingRequestsList) {
3730 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3731 mInstantAECSettledFrameNumber)) {
3732 camera3_notify_msg_t notify_msg = {};
3733 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003734 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003735 QCamera3ProcessingChannel *channel =
3736 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003737 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003738 if (p_cam_frame_drop) {
3739 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003740 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003741 // Got the stream ID for drop frame.
3742 dropFrame = true;
3743 break;
3744 }
3745 }
3746 } else {
3747 // This is instant AEC case.
3748 // For instant AEC drop the stream untill AEC is settled.
3749 dropFrame = true;
3750 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003751
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003752 if (dropFrame) {
3753 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3754 if (p_cam_frame_drop) {
3755 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003756 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003758 } else {
3759 // For instant AEC, inform frame drop and frame number
3760 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3761 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003762 pendingRequest.frame_number, streamID,
3763 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 }
3765 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003767 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003768 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003769 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003770 if (p_cam_frame_drop) {
3771 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003772 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003773 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003774 } else {
3775 // For instant AEC, inform frame drop and frame number
3776 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3777 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003778 pendingRequest.frame_number, streamID,
3779 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 }
3781 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003782 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 PendingFrameDrop.stream_ID = streamID;
3784 // Add the Frame drop info to mPendingFrameDropList
3785 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003787 }
3788 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003790
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 for (auto & pendingRequest : mPendingRequestsList) {
3792 // Find the pending request with the frame number.
3793 if (pendingRequest.frame_number == frame_number) {
3794 // Update the sensor timestamp.
3795 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003796
Thierry Strudel3d639192016-09-09 11:52:26 -07003797
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003798            /* Set the timestamp in display metadata so that clients aware of
3799               private_handle, such as VT, can use these unmodified timestamps.
3800               The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003801 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003802
Thierry Strudel3d639192016-09-09 11:52:26 -07003803 // Find channel requiring metadata, meaning internal offline postprocess
3804 // is needed.
3805 //TODO: for now, we don't support two streams requiring metadata at the same time.
3806            // (because we are not making copies, and the metadata buffer is not reference counted.)
3807 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3809 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003810 if (iter->need_metadata) {
3811 internalPproc = true;
3812 QCamera3ProcessingChannel *channel =
3813 (QCamera3ProcessingChannel *)iter->stream->priv;
3814 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003815 if(p_is_metabuf_queued != NULL) {
3816 *p_is_metabuf_queued = true;
3817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003818 break;
3819 }
3820 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003821 for (auto itr = pendingRequest.internalRequestList.begin();
3822 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003823 if (itr->need_metadata) {
3824 internalPproc = true;
3825 QCamera3ProcessingChannel *channel =
3826 (QCamera3ProcessingChannel *)itr->stream->priv;
3827 channel->queueReprocMetadata(metadata_buf);
3828 break;
3829 }
3830 }
3831
Thierry Strudel54dc9782017-02-15 12:12:10 -08003832 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003833
3834 bool *enableZsl = nullptr;
3835 if (gExposeEnableZslKey) {
3836 enableZsl = &pendingRequest.enableZsl;
3837 }
3838
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839 resultMetadata = translateFromHalMetadata(metadata,
3840 pendingRequest.timestamp, pendingRequest.request_id,
3841 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3842 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003843 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003844 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003845 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003846 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003847 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003848 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003849
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003850 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003851
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003852 if (pendingRequest.blob_request) {
3853 //Dump tuning metadata if enabled and available
3854 char prop[PROPERTY_VALUE_MAX];
3855 memset(prop, 0, sizeof(prop));
3856 property_get("persist.camera.dumpmetadata", prop, "0");
3857 int32_t enabled = atoi(prop);
3858 if (enabled && metadata->is_tuning_params_valid) {
3859 dumpMetadataToFile(metadata->tuning_params,
3860 mMetaFrameCount,
3861 enabled,
3862 "Snapshot",
3863 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003864 }
3865 }
3866
3867 if (!internalPproc) {
3868 LOGD("couldn't find need_metadata for this metadata");
3869 // Return metadata buffer
3870 if (free_and_bufdone_meta_buf) {
3871 mMetadataChannel->bufDone(metadata_buf);
3872 free(metadata_buf);
3873 }
3874 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003875
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003876 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003877 }
3878 }
3879
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003880 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3881
3882 // Try to send out capture result metadata.
3883 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003884 return;
3885
Thierry Strudel3d639192016-09-09 11:52:26 -07003886done_metadata:
3887 for (pendingRequestIterator i = mPendingRequestsList.begin();
3888 i != mPendingRequestsList.end() ;i++) {
3889 i->pipeline_depth++;
3890 }
3891 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3892 unblockRequestIfNecessary();
3893}
3894
3895/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003896 * FUNCTION : handleDepthDataLocked
3897 *
3898 * DESCRIPTION: Handles incoming depth data
3899 *
3900 * PARAMETERS : @depthData : Depth data
3901 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003902 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003903 *
3904 * RETURN :
3905 *
3906 *==========================================================================*/
3907void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003908 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003909 uint32_t currentFrameNumber;
3910 buffer_handle_t *depthBuffer;
3911
3912 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003913 return;
3914 }
3915
3916 camera3_stream_buffer_t resultBuffer =
3917 {.acquire_fence = -1,
3918 .release_fence = -1,
3919 .status = CAMERA3_BUFFER_STATUS_OK,
3920 .buffer = nullptr,
3921 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003922 do {
3923 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3924 if (nullptr == depthBuffer) {
3925 break;
3926 }
3927
Emilian Peev7650c122017-01-19 08:24:33 -08003928 resultBuffer.buffer = depthBuffer;
3929 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003930 if (valid) {
3931 int32_t rc = mDepthChannel->populateDepthData(depthData,
3932 frameNumber);
3933 if (NO_ERROR != rc) {
3934 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3935 } else {
3936 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3937 }
Emilian Peev7650c122017-01-19 08:24:33 -08003938 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003939 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003940 }
3941 } else if (currentFrameNumber > frameNumber) {
3942 break;
3943 } else {
3944 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3945 {{currentFrameNumber, mDepthChannel->getStream(),
3946 CAMERA3_MSG_ERROR_BUFFER}}};
3947 orchestrateNotify(&notify_msg);
3948
3949 LOGE("Depth buffer for frame number: %d is missing "
3950 "returning back!", currentFrameNumber);
3951 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3952 }
3953 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003954 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003955 } while (currentFrameNumber < frameNumber);
3956}
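
/* Illustrative sketch (not part of the build): the draining order enforced by
 * handleDepthDataLocked() above. Frame numbers are hypothetical.
 *
 *   // Mapped depth buffers pending for frames 10, 11, 12; depthData arrives for 12.
 *   // Iteration 1: oldest frame 10 < 12 -> CAMERA3_MSG_ERROR_BUFFER is notified and
 *   //              the buffer is returned with CAMERA3_BUFFER_STATUS_ERROR.
 *   // Iteration 2: frame 11 < 12 -> same missing-buffer error path.
 *   // Iteration 3: frame 12 == 12 -> populateDepthData() fills the buffer, which is
 *   //              returned with CAMERA3_BUFFER_STATUS_OK (assuming the incoming
 *   //              data is flagged valid, ERROR otherwise).
 */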
3957
3958/*===========================================================================
3959 * FUNCTION : notifyErrorFoPendingDepthData
3960 *
3961 * DESCRIPTION: Returns error for any pending depth buffers
3962 *
3963 * PARAMETERS : depthCh - depth channel that needs to get flushed
3964 *
3965 * RETURN :
3966 *
3967 *==========================================================================*/
3968void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3969 QCamera3DepthChannel *depthCh) {
3970 uint32_t currentFrameNumber;
3971 buffer_handle_t *depthBuffer;
3972
3973 if (nullptr == depthCh) {
3974 return;
3975 }
3976
3977 camera3_notify_msg_t notify_msg =
3978 {.type = CAMERA3_MSG_ERROR,
3979 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3980 camera3_stream_buffer_t resultBuffer =
3981 {.acquire_fence = -1,
3982 .release_fence = -1,
3983 .buffer = nullptr,
3984 .stream = depthCh->getStream(),
3985 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003986
3987 while (nullptr !=
3988 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3989 depthCh->unmapBuffer(currentFrameNumber);
3990
3991 notify_msg.message.error.frame_number = currentFrameNumber;
3992 orchestrateNotify(&notify_msg);
3993
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003994 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003995 };
3996}
3997
3998/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003999 * FUNCTION : hdrPlusPerfLock
4000 *
4001 * DESCRIPTION: perf lock for HDR+ using custom intent
4002 *
4003 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4004 *
4005 * RETURN : None
4006 *
4007 *==========================================================================*/
4008void QCamera3HardwareInterface::hdrPlusPerfLock(
4009 mm_camera_super_buf_t *metadata_buf)
4010{
4011 if (NULL == metadata_buf) {
4012 LOGE("metadata_buf is NULL");
4013 return;
4014 }
4015 metadata_buffer_t *metadata =
4016 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4017 int32_t *p_frame_number_valid =
4018 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4019 uint32_t *p_frame_number =
4020 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4021
4022 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4023 LOGE("%s: Invalid metadata", __func__);
4024 return;
4025 }
4026
4027 //acquire perf lock for 5 sec after the last HDR frame is captured
4028 // (p_frame_number_valid and p_frame_number were already NULL-checked above)
4029 if (*p_frame_number_valid &&
4030 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004031 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004032 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004034}
4035
4036/*===========================================================================
4037 * FUNCTION : handleInputBufferWithLock
4038 *
4039 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4040 *
4041 * PARAMETERS : @frame_number: frame number of the input buffer
4042 *
4043 * RETURN :
4044 *
4045 *==========================================================================*/
4046void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4047{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004048 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004049 pendingRequestIterator i = mPendingRequestsList.begin();
4050 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4051 i++;
4052 }
4053 if (i != mPendingRequestsList.end() && i->input_buffer) {
4054 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004055 CameraMetadata settings;
4056 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4057 if(i->settings) {
4058 settings = i->settings;
4059 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4060 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004061 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004062 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004063 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004064 } else {
4065 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004066 }
4067
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004068 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4069 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4070 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071
4072 camera3_capture_result result;
4073 memset(&result, 0, sizeof(camera3_capture_result));
4074 result.frame_number = frame_number;
4075 result.result = i->settings;
4076 result.input_buffer = i->input_buffer;
4077 result.partial_result = PARTIAL_RESULT_COUNT;
4078
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004079 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004080 LOGD("Input request metadata and input buffer frame_number = %u",
4081 i->frame_number);
4082 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004083
4084 // Dispatch result metadata that may be just unblocked by this reprocess result.
4085 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004086 } else {
4087 LOGE("Could not find input request for frame number %d", frame_number);
4088 }
4089}
4090
4091/*===========================================================================
4092 * FUNCTION : handleBufferWithLock
4093 *
4094 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4095 *
4096 * PARAMETERS : @buffer: image buffer for the callback
4097 * @frame_number: frame number of the image buffer
4098 *
4099 * RETURN :
4100 *
4101 *==========================================================================*/
4102void QCamera3HardwareInterface::handleBufferWithLock(
4103 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4104{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004105 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004106
4107 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4108 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4109 }
4110
Thierry Strudel3d639192016-09-09 11:52:26 -07004111 /* Nothing to be done during error state */
4112 if ((ERROR == mState) || (DEINIT == mState)) {
4113 return;
4114 }
4115 if (mFlushPerf) {
4116 handleBuffersDuringFlushLock(buffer);
4117 return;
4118 }
4119 //not in flush
4120 // If the frame number doesn't exist in the pending request list,
4121 // directly send the buffer to the frameworks, and update pending buffers map
4122 // Otherwise, book-keep the buffer.
4123 pendingRequestIterator i = mPendingRequestsList.begin();
4124 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4125 i++;
4126 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004127
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004128 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004129 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004130 // For a reprocessing request, try to send out result metadata.
4131 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004132 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004133 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004134
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004135 // Check if this frame was dropped.
4136 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4137 m != mPendingFrameDropList.end(); m++) {
4138 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4139 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4140 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4141 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4142 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4143 frame_number, streamID);
4144 m = mPendingFrameDropList.erase(m);
4145 break;
4146 }
4147 }
4148
4149 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4150 LOGH("result frame_number = %d, buffer = %p",
4151 frame_number, buffer->buffer);
4152
4153 mPendingBuffersMap.removeBuf(buffer->buffer);
4154 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4155
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004156 if (mPreviewStarted == false) {
4157 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4158 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004159 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4160
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004161 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4162 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4163 mPreviewStarted = true;
4164
4165 // Set power hint for preview
4166 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4167 }
4168 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004169}
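
/* Illustrative sketch (not part of the build): how a dropped frame surfaces to the
 * framework through handleBufferWithLock(). Stream ID and frame number are hypothetical.
 *
 *   // mPendingFrameDropList contains {stream_ID = 2, frame_number = 57}.
 *   // When the buffer for frame 57 on stream 2 arrives:
 *   //   buffer->status = CAMERA3_BUFFER_STATUS_ERROR;          // marked as dropped
 *   //   mPendingBuffersMap.removeBuf(buffer->buffer);
 *   //   mOutputBufferDispatcher.markBufferReady(57, *buffer);  // error buffer is still
 *   //                                                          // returned, in order
 */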
4170
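/*===========================================================================
 * FUNCTION : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Updates the result metadata of the pending request matching the
 *              given frame number, forwards it to the HDR+ client for live
 *              requests, and tries to dispatch results that are ready.
 *              Called with mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number of the capture result
 *              @resultMetadata: result metadata translated from HAL metadata
 *
 * RETURN :
 *
 *==========================================================================*/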
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004171void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004172 const camera_metadata_t *resultMetadata)
4173{
4174 // Find the pending request for this result metadata.
4175 auto requestIter = mPendingRequestsList.begin();
4176 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4177 requestIter++;
4178 }
4179
4180 if (requestIter == mPendingRequestsList.end()) {
4181 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4182 return;
4183 }
4184
4185 // Update the result metadata
4186 requestIter->resultMetadata = resultMetadata;
4187
4188 // Check what type of request this is.
4189 bool liveRequest = false;
4190 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004191 // HDR+ request doesn't have partial results.
4192 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004193 } else if (requestIter->input_buffer != nullptr) {
4194 // Reprocessing request result is the same as settings.
4195 requestIter->resultMetadata = requestIter->settings;
4196 // Reprocessing request doesn't have partial results.
4197 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4198 } else {
4199 liveRequest = true;
4200 requestIter->partial_result_cnt++;
4201 mPendingLiveRequest--;
4202
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004203 {
4204 Mutex::Autolock l(gHdrPlusClientLock);
4205 // For a live request, send the metadata to HDR+ client.
4206 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4207 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4208 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4209 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004210 }
4211 }
4212
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004213 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4214}
4215
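/*===========================================================================
 * FUNCTION : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Sends out, in frame-number order, the result metadata of all
 *              pending requests that are ready, and notifies ERROR_RESULT for
 *              earlier live requests that are still missing metadata.
 *              Called with mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number of the result that triggered dispatch
 *              @isLiveRequest: true if that result belongs to a live request
 *                              (neither HDR+ nor reprocess)
 *
 * RETURN :
 *
 *==========================================================================*/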
4216void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4217 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004218 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4219 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004220 bool readyToSend = true;
4221
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004222 // Iterate through the pending requests to send out result metadata that are ready. Also if
4223 // this result metadata belongs to a live request, notify errors for previous live requests
4224 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004225 auto iter = mPendingRequestsList.begin();
4226 while (iter != mPendingRequestsList.end()) {
4227 // Check if current pending request is ready. If it's not ready, the following pending
4228 // requests are also not ready.
4229 if (readyToSend && iter->resultMetadata == nullptr) {
4230 readyToSend = false;
4231 }
4232
4233 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4234
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004235 camera3_capture_result_t result = {};
4236 result.frame_number = iter->frame_number;
4237 result.result = iter->resultMetadata;
4238 result.partial_result = iter->partial_result_cnt;
4239
4240 // If this pending buffer has result metadata, we may be able to send out shutter callback
4241 // and result metadata.
4242 if (iter->resultMetadata != nullptr) {
4243 if (!readyToSend) {
4244 // If any of the previous pending requests is not ready, this pending request is
4245 // also not ready to send in order to keep shutter callbacks and result metadata
4246 // in order.
4247 iter++;
4248 continue;
4249 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004250 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004251 // If the result metadata belongs to a live request, notify errors for previous pending
4252 // live requests.
4253 mPendingLiveRequest--;
4254
4255 CameraMetadata dummyMetadata;
4256 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4257 result.result = dummyMetadata.release();
4258
4259 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004260
4261 // partial_result should be PARTIAL_RESULT_CNT in case of
4262 // ERROR_RESULT.
4263 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4264 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004265 } else {
4266 iter++;
4267 continue;
4268 }
4269
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004270 result.output_buffers = nullptr;
4271 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004272 orchestrateResult(&result);
4273
4274 // For reprocessing, result metadata is the same as settings so do not free it here to
4275 // avoid double free.
4276 if (result.result != iter->settings) {
4277 free_camera_metadata((camera_metadata_t *)result.result);
4278 }
4279 iter->resultMetadata = nullptr;
4280 iter = erasePendingRequest(iter);
4281 }
4282
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004283 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004284 for (auto &iter : mPendingRequestsList) {
4285 // Increment pipeline depth for the following pending requests.
4286 if (iter.frame_number > frameNumber) {
4287 iter.pipeline_depth++;
4288 }
4289 }
4290 }
4291
4292 unblockRequestIfNecessary();
4293}
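
/* Illustrative sketch (not part of the build): the in-order dispatch invariant
 * enforced by dispatchResultMetadataWithLock(). Frame numbers are hypothetical.
 *
 *   // Pending list: 20 (metadata ready), 21 (not ready), 22 (metadata ready).
 *   // Dispatch triggered by the result for frame 20: the loop sends 20, leaves 21
 *   // pending, and keeps 22 queued behind it so results reach the framework in
 *   // ascending frame-number order.
 *   // If instead the trigger had been a live result for frame 22, the stale live
 *   // request 21 would be failed with CAMERA3_MSG_ERROR_RESULT and erased.
 */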
4294
Thierry Strudel3d639192016-09-09 11:52:26 -07004295/*===========================================================================
4296 * FUNCTION : unblockRequestIfNecessary
4297 *
4298 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4299 * that mMutex is held when this function is called.
4300 *
4301 * PARAMETERS :
4302 *
4303 * RETURN :
4304 *
4305 *==========================================================================*/
4306void QCamera3HardwareInterface::unblockRequestIfNecessary()
4307{
4308 // Unblock process_capture_request
4309 pthread_cond_signal(&mRequestCond);
4310}
4311
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004312/*===========================================================================
4313 * FUNCTION : isHdrSnapshotRequest
4314 *
4315 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4316 *
4317 * PARAMETERS : camera3 request structure
4318 *
4319 * RETURN : boolean decision variable
4320 *
4321 *==========================================================================*/
4322bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4323{
4324 if (request == NULL) {
4325 LOGE("Invalid request handle");
4326 assert(0);
4327 return false;
4328 }
4329
4330 if (!mForceHdrSnapshot) {
4331 CameraMetadata frame_settings;
4332 frame_settings = request->settings;
4333
4334 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4335 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4336 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4337 return false;
4338 }
4339 } else {
4340 return false;
4341 }
4342
4343 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4344 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4345 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4346 return false;
4347 }
4348 } else {
4349 return false;
4350 }
4351 }
4352
4353 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4354 if (request->output_buffers[i].stream->format
4355 == HAL_PIXEL_FORMAT_BLOB) {
4356 return true;
4357 }
4358 }
4359
4360 return false;
4361}
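
/* Illustrative sketch (not part of the build): settings that make a request
 * qualify as an HDR snapshot when mForceHdrSnapshot is false. The metadata
 * object below is hypothetical.
 *
 *   CameraMetadata settings;
 *   uint8_t mode = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *   // ...plus at least one HAL_PIXEL_FORMAT_BLOB output buffer in the request.
 */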
4362/*===========================================================================
4363 * FUNCTION : orchestrateRequest
4364 *
4365 * DESCRIPTION: Orchestrates a capture request from camera service
4366 *
4367 * PARAMETERS :
4368 * @request : request from framework to process
4369 *
4370 * RETURN : Error status codes
4371 *
4372 *==========================================================================*/
4373int32_t QCamera3HardwareInterface::orchestrateRequest(
4374 camera3_capture_request_t *request)
4375{
4376
4377 uint32_t originalFrameNumber = request->frame_number;
4378 uint32_t originalOutputCount = request->num_output_buffers;
4379 const camera_metadata_t *original_settings = request->settings;
4380 List<InternalRequest> internallyRequestedStreams;
4381 List<InternalRequest> emptyInternalList;
4382
4383 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4384 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4385 uint32_t internalFrameNumber;
4386 CameraMetadata modified_meta;
4387
4388
4389 /* Add Blob channel to list of internally requested streams */
4390 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4391 if (request->output_buffers[i].stream->format
4392 == HAL_PIXEL_FORMAT_BLOB) {
4393 InternalRequest streamRequested;
4394 streamRequested.meteringOnly = 1;
4395 streamRequested.need_metadata = 0;
4396 streamRequested.stream = request->output_buffers[i].stream;
4397 internallyRequestedStreams.push_back(streamRequested);
4398 }
4399 }
4400 request->num_output_buffers = 0;
4401 auto itr = internallyRequestedStreams.begin();
4402
4403 /* Modify setting to set compensation */
4404 modified_meta = request->settings;
4405 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4406 uint8_t aeLock = 1;
4407 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4408 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4409 camera_metadata_t *modified_settings = modified_meta.release();
4410 request->settings = modified_settings;
4411
4412 /* Capture Settling & -2x frame */
4413 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4414 request->frame_number = internalFrameNumber;
4415 processCaptureRequest(request, internallyRequestedStreams);
4416
4417 request->num_output_buffers = originalOutputCount;
4418 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4419 request->frame_number = internalFrameNumber;
4420 processCaptureRequest(request, emptyInternalList);
4421 request->num_output_buffers = 0;
4422
4423 modified_meta = modified_settings;
4424 expCompensation = 0;
4425 aeLock = 1;
4426 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4427 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4428 modified_settings = modified_meta.release();
4429 request->settings = modified_settings;
4430
4431 /* Capture Settling & 0X frame */
4432
4433 itr = internallyRequestedStreams.begin();
4434 if (itr == internallyRequestedStreams.end()) {
4435 LOGE("Error Internally Requested Stream list is empty");
4436 assert(0);
4437 } else {
4438 itr->need_metadata = 0;
4439 itr->meteringOnly = 1;
4440 }
4441
4442 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4443 request->frame_number = internalFrameNumber;
4444 processCaptureRequest(request, internallyRequestedStreams);
4445
4446 itr = internallyRequestedStreams.begin();
4447 if (itr == internallyRequestedStreams.end()) {
4448 ALOGE("Error Internally Requested Stream list is empty");
4449 assert(0);
4450 } else {
4451 itr->need_metadata = 1;
4452 itr->meteringOnly = 0;
4453 }
4454
4455 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4456 request->frame_number = internalFrameNumber;
4457 processCaptureRequest(request, internallyRequestedStreams);
4458
4459 /* Capture 2X frame*/
4460 modified_meta = modified_settings;
4461 expCompensation = GB_HDR_2X_STEP_EV;
4462 aeLock = 1;
4463 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4464 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4465 modified_settings = modified_meta.release();
4466 request->settings = modified_settings;
4467
4468 itr = internallyRequestedStreams.begin();
4469 if (itr == internallyRequestedStreams.end()) {
4470 ALOGE("Error Internally Requested Stream list is empty");
4471 assert(0);
4472 } else {
4473 itr->need_metadata = 0;
4474 itr->meteringOnly = 1;
4475 }
4476 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4477 request->frame_number = internalFrameNumber;
4478 processCaptureRequest(request, internallyRequestedStreams);
4479
4480 itr = internallyRequestedStreams.begin();
4481 if (itr == internallyRequestedStreams.end()) {
4482 ALOGE("Error Internally Requested Stream list is empty");
4483 assert(0);
4484 } else {
4485 itr->need_metadata = 1;
4486 itr->meteringOnly = 0;
4487 }
4488
4489 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4490 request->frame_number = internalFrameNumber;
4491 processCaptureRequest(request, internallyRequestedStreams);
4492
4493
4494 /* Capture 2X on original streaming config*/
4495 internallyRequestedStreams.clear();
4496
4497 /* Restore original settings pointer */
4498 request->settings = original_settings;
4499 } else {
4500 uint32_t internalFrameNumber;
4501 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4502 request->frame_number = internalFrameNumber;
4503 return processCaptureRequest(request, internallyRequestedStreams);
4504 }
4505
4506 return NO_ERROR;
4507}
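
/* Illustrative sketch (not part of the build): internal requests generated by
 * orchestrateRequest() for one framework HDR snapshot request. Frame numbers
 * are hypothetical; EV steps come from GB_HDR_HALF_STEP_EV and GB_HDR_2X_STEP_EV
 * with AE locked.
 *
 *   // framework frame 100 ->
 *   //   internal 1001: GB_HDR_HALF_STEP_EV settle (blob stream, metering only)
 *   //   internal 1002: GB_HDR_HALF_STEP_EV capture on the original streams
 *   //                  (mapped back to framework frame 100)
 *   //   internal 1003: 0 EV settle (metering only)
 *   //   internal 1004: 0 EV internal blob capture (need_metadata = 1)
 *   //   internal 1005: GB_HDR_2X_STEP_EV settle (metering only)
 *   //   internal 1006: GB_HDR_2X_STEP_EV internal blob capture (need_metadata = 1)
 */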
4508
4509/*===========================================================================
4510 * FUNCTION : orchestrateResult
4511 *
4512 * DESCRIPTION: Orchestrates a capture result to camera service
4513 *
4514 * PARAMETERS :
4515 * @result : capture result to be sent to the camera service
4516 *
4517 * RETURN :
4518 *
4519 *==========================================================================*/
4520void QCamera3HardwareInterface::orchestrateResult(
4521 camera3_capture_result_t *result)
4522{
4523 uint32_t frameworkFrameNumber;
4524 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4525 frameworkFrameNumber);
4526 if (rc != NO_ERROR) {
4527 LOGE("Cannot find translated frameworkFrameNumber");
4528 assert(0);
4529 } else {
4530 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004531 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004532 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004533 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004534 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4535 camera_metadata_entry_t entry;
4536 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4537 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004538 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004539 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4540 if (ret != OK)
4541 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004542 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004543 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004544 result->frame_number = frameworkFrameNumber;
4545 mCallbackOps->process_capture_result(mCallbackOps, result);
4546 }
4547 }
4548}
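
/* Illustrative sketch (not part of the build): frame number translation performed
 * by orchestrateResult(). Numbers are hypothetical.
 *
 *   // Internal frame 1004 maps to EMPTY_FRAMEWORK_FRAME_NUMBER
 *   //   -> the result is dropped (purely internal request).
 *   // Internal frame 1002 maps to framework frame 100
 *   //   -> ANDROID_SYNC_FRAME_NUMBER (if present) is rewritten to 100 and the
 *   //      result is forwarded to the framework as frame 100.
 */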
4549
4550/*===========================================================================
4551 * FUNCTION : orchestrateNotify
4552 *
4553 * DESCRIPTION: Orchestrates a notify to camera service
4554 *
4555 * PARAMETERS :
4556 * @notify_msg : notify message to be sent to the camera service
4557 *
4558 * RETURN :
4559 *
4560 *==========================================================================*/
4561void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4562{
4563 uint32_t frameworkFrameNumber;
4564 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004565 int32_t rc = NO_ERROR;
4566
4567 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004568 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004569
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004570 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004571 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4572 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4573 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004574 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004575 LOGE("Cannot find translated frameworkFrameNumber");
4576 assert(0);
4577 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004578 }
4579 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004580
4581 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4582 LOGD("Internal Request drop the notifyCb");
4583 } else {
4584 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4585 mCallbackOps->notify(mCallbackOps, notify_msg);
4586 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004587}
4588
4589/*===========================================================================
4590 * FUNCTION : FrameNumberRegistry
4591 *
4592 * DESCRIPTION: Constructor
4593 *
4594 * PARAMETERS :
4595 *
4596 * RETURN :
4597 *
4598 *==========================================================================*/
4599FrameNumberRegistry::FrameNumberRegistry()
4600{
4601 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4602}
4603
4604/*===========================================================================
4605 * FUNCTION : ~FrameNumberRegistry
4606 *
4607 * DESCRIPTION: Destructor
4608 *
4609 * PARAMETERS :
4610 *
4611 * RETURN :
4612 *
4613 *==========================================================================*/
4614FrameNumberRegistry::~FrameNumberRegistry()
4615{
4616}
4617
4618/*===========================================================================
4619 * FUNCTION : purgeOldEntriesLocked
4620 *
4621 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4622 *
4623 * PARAMETERS :
4624 *
4625 * RETURN : NONE
4626 *
4627 *==========================================================================*/
4628void FrameNumberRegistry::purgeOldEntriesLocked()
4629{
4630 while (_register.begin() != _register.end()) {
4631 auto itr = _register.begin();
4632 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4633 _register.erase(itr);
4634 } else {
4635 return;
4636 }
4637 }
4638}
4639
4640/*===========================================================================
4641 * FUNCTION : allocStoreInternalFrameNumber
4642 *
4643 * DESCRIPTION: Method to record a framework request and associate a newly
4644 * generated internal frame number with it
4645 *
4646 * PARAMETERS :
4647 * @frameworkFrameNumber: Identifier given by the framework
4648 * @internalFrameNumber : Output parameter that receives the newly generated
4649 * internal frame number
4650 *
4651 * RETURN : Error code
4652 *
4653 *==========================================================================*/
4654int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4655 uint32_t &internalFrameNumber)
4656{
4657 Mutex::Autolock lock(mRegistryLock);
4658 internalFrameNumber = _nextFreeInternalNumber++;
4659 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4660 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4661 purgeOldEntriesLocked();
4662 return NO_ERROR;
4663}
4664
4665/*===========================================================================
4666 * FUNCTION : generateStoreInternalFrameNumber
4667 *
4668 * DESCRIPTION: Method to generate and store a new internal frame number that is
4669 * not associated with any framework request
4670 *
4671 * PARAMETERS :
4672 * @internalFrameNumber: Output parameter that receives the newly generated
4673 * internal frame number
4673 *
4674 *
4675 * RETURN : Error code
4676 *
4677 *==========================================================================*/
4678int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4679{
4680 Mutex::Autolock lock(mRegistryLock);
4681 internalFrameNumber = _nextFreeInternalNumber++;
4682 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4683 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4684 purgeOldEntriesLocked();
4685 return NO_ERROR;
4686}
4687
4688/*===========================================================================
4689 * FUNCTION : getFrameworkFrameNumber
4690 *
4691 * DESCRIPTION: Method to query the framework frame number given an internal one
4692 *
4693 * PARAMETERS :
4694 * @internalFrameNumber : Internal frame number reference
4695 * @frameworkFrameNumber: Output parameter holding the framework frame number
4696 *
4697 * RETURN : Error code
4698 *
4699 *==========================================================================*/
4700int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4701 uint32_t &frameworkFrameNumber)
4702{
4703 Mutex::Autolock lock(mRegistryLock);
4704 auto itr = _register.find(internalFrameNumber);
4705 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004706 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004707 return -ENOENT;
4708 }
4709
4710 frameworkFrameNumber = itr->second;
4711 purgeOldEntriesLocked();
4712 return NO_ERROR;
4713}
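
/* Illustrative usage sketch (not part of the build): a round trip through
 * FrameNumberRegistry. The framework frame number 100 is hypothetical.
 *
 *   FrameNumberRegistry db;
 *   uint32_t internalFn, fwkFn;
 *
 *   db.allocStoreInternalFrameNumber(100, internalFn);  // framework 100 -> internal
 *   if (db.getFrameworkFrameNumber(internalFn, fwkFn) == NO_ERROR) {
 *       // fwkFn == 100
 *   }
 *
 *   db.generateStoreInternalFrameNumber(internalFn);     // purely internal frame
 *   db.getFrameworkFrameNumber(internalFn, fwkFn);       // fwkFn == EMPTY_FRAMEWORK_FRAME_NUMBER
 */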
Thierry Strudel3d639192016-09-09 11:52:26 -07004714
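/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from a channel's
 *              stream info, including per-plane stride/scanline and the padding
 *              needed to reach the full frame length
 *              (padding = frame_len - sum of stride * scanline over all planes).
 *
 * PARAMETERS : @config : output pbcamera stream configuration
 *              @pbStreamId : stream id to assign in the configuration
 *              @pbStreamFormat: pbcamera pixel format of the stream
 *              @channel : channel that owns the stream
 *              @streamIndex : index of the stream within the channel
 *
 * RETURN : OK on success; BAD_VALUE or NAME_NOT_FOUND on failure
 *
 *==========================================================================*/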
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004715status_t QCamera3HardwareInterface::fillPbStreamConfig(
4716 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4717 QCamera3Channel *channel, uint32_t streamIndex) {
4718 if (config == nullptr) {
4719 LOGE("%s: config is null", __FUNCTION__);
4720 return BAD_VALUE;
4721 }
4722
4723 if (channel == nullptr) {
4724 LOGE("%s: channel is null", __FUNCTION__);
4725 return BAD_VALUE;
4726 }
4727
4728 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4729 if (stream == nullptr) {
4730 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4731 return NAME_NOT_FOUND;
4732 }
4733
4734 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4735 if (streamInfo == nullptr) {
4736 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4737 return NAME_NOT_FOUND;
4738 }
4739
4740 config->id = pbStreamId;
4741 config->image.width = streamInfo->dim.width;
4742 config->image.height = streamInfo->dim.height;
4743 config->image.padding = 0;
4744 config->image.format = pbStreamFormat;
4745
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004746 uint32_t totalPlaneSize = 0;
4747
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004748 // Fill plane information.
4749 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4750 pbcamera::PlaneConfiguration plane;
4751 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4752 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4753 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004754
4755 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004756 }
4757
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004758 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004759 return OK;
4760}
4761
Thierry Strudel3d639192016-09-09 11:52:26 -07004762/*===========================================================================
4763 * FUNCTION : processCaptureRequest
4764 *
4765 * DESCRIPTION: process a capture request from camera service
4766 *
4767 * PARAMETERS :
4768 * @request : request from framework to process
4769 *
4770 * RETURN :
4771 *
4772 *==========================================================================*/
4773int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004774 camera3_capture_request_t *request,
4775 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004776{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004777 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004778 int rc = NO_ERROR;
4779 int32_t request_id;
4780 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004781 bool isVidBufRequested = false;
4782 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004783 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004784
4785 pthread_mutex_lock(&mMutex);
4786
4787 // Validate current state
4788 switch (mState) {
4789 case CONFIGURED:
4790 case STARTED:
4791 /* valid state */
4792 break;
4793
4794 case ERROR:
4795 pthread_mutex_unlock(&mMutex);
4796 handleCameraDeviceError();
4797 return -ENODEV;
4798
4799 default:
4800 LOGE("Invalid state %d", mState);
4801 pthread_mutex_unlock(&mMutex);
4802 return -ENODEV;
4803 }
4804
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004805 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004806 if (rc != NO_ERROR) {
4807 LOGE("incoming request is not valid");
4808 pthread_mutex_unlock(&mMutex);
4809 return rc;
4810 }
4811
4812 meta = request->settings;
4813
4814 // For first capture request, send capture intent, and
4815 // stream on all streams
4816 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004817 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004818 // send an unconfigure to the backend so that the isp
4819 // resources are deallocated
4820 if (!mFirstConfiguration) {
4821 cam_stream_size_info_t stream_config_info;
4822 int32_t hal_version = CAM_HAL_V3;
4823 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4824 stream_config_info.buffer_info.min_buffers =
4825 MIN_INFLIGHT_REQUESTS;
4826 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004827 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004828 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004829 clear_metadata_buffer(mParameters);
4830 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4831 CAM_INTF_PARM_HAL_VERSION, hal_version);
4832 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4833 CAM_INTF_META_STREAM_INFO, stream_config_info);
4834 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4835 mParameters);
4836 if (rc < 0) {
4837 LOGE("set_parms for unconfigure failed");
4838 pthread_mutex_unlock(&mMutex);
4839 return rc;
4840 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004841
Thierry Strudel3d639192016-09-09 11:52:26 -07004842 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004843 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004844 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004845 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004847 property_get("persist.camera.is_type", is_type_value, "4");
4848 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4849 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4850 property_get("persist.camera.is_type_preview", is_type_value, "4");
4851 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4852 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004853
4854 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4855 int32_t hal_version = CAM_HAL_V3;
4856 uint8_t captureIntent =
4857 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4858 mCaptureIntent = captureIntent;
4859 clear_metadata_buffer(mParameters);
4860 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4861 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4862 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004863 if (mFirstConfiguration) {
4864 // configure instant AEC
4865 // Instant AEC is a session based parameter and it is needed only
4866 // once per complete session after open camera.
4867 // i.e. This is set only once for the first capture request, after open camera.
4868 setInstantAEC(meta);
4869 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004870 uint8_t fwkVideoStabMode=0;
4871 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4872 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4873 }
4874
Xue Tuecac74e2017-04-17 13:58:15 -07004875 // If the EIS setprop is enabled, turn EIS on only for video/preview
4876 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004877 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004878 int32_t vsMode;
4879 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4880 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4881 rc = BAD_VALUE;
4882 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 LOGD("setEis %d", setEis);
4884 bool eis3Supported = false;
4885 size_t count = IS_TYPE_MAX;
4886 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4887 for (size_t i = 0; i < count; i++) {
4888 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4889 eis3Supported = true;
4890 break;
4891 }
4892 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004893
4894 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004895 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4897 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004898 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4899 is_type = isTypePreview;
4900 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4901 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4902 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004904 } else {
4905 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004906 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004907 } else {
4908 is_type = IS_TYPE_NONE;
4909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004910 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004911 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004912 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4913 }
4914 }
4915
4916 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4917 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4918
Thierry Strudel54dc9782017-02-15 12:12:10 -08004919 //Disable tintless only if the property is set to 0
4920 memset(prop, 0, sizeof(prop));
4921 property_get("persist.camera.tintless.enable", prop, "1");
4922 int32_t tintless_value = atoi(prop);
4923
Thierry Strudel3d639192016-09-09 11:52:26 -07004924 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4925 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 //Disable CDS for HFR mode or if DIS/EIS is on.
4928 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4929 //after every configure_stream
4930 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4931 (m_bIsVideo)) {
4932 int32_t cds = CAM_CDS_MODE_OFF;
4933 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4934 CAM_INTF_PARM_CDS_MODE, cds))
4935 LOGE("Failed to disable CDS for HFR mode");
4936
4937 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004938
4939 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4940 uint8_t* use_av_timer = NULL;
4941
4942 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004943 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004944 use_av_timer = &m_debug_avtimer;
4945 }
4946 else{
4947 use_av_timer =
4948 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004949 if (use_av_timer) {
4950 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4951 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004952 }
4953
4954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4955 rc = BAD_VALUE;
4956 }
4957 }
4958
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 setMobicat();
4960
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004961 uint8_t nrMode = 0;
4962 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4963 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4964 }
4965
Thierry Strudel3d639192016-09-09 11:52:26 -07004966 /* Set fps and hfr mode while sending meta stream info so that sensor
4967 * can configure appropriate streaming mode */
4968 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004969 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4970 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004971 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4972 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004973 if (rc == NO_ERROR) {
4974 int32_t max_fps =
4975 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004976 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004977 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4978 }
4979 /* For HFR, more buffers are dequeued upfront to improve the performance */
4980 if (mBatchSize) {
4981 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4982 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4983 }
4984 }
4985 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004986 LOGE("setHalFpsRange failed");
4987 }
4988 }
4989 if (meta.exists(ANDROID_CONTROL_MODE)) {
4990 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4991 rc = extractSceneMode(meta, metaMode, mParameters);
4992 if (rc != NO_ERROR) {
4993 LOGE("extractSceneMode failed");
4994 }
4995 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004996 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004997
Thierry Strudel04e026f2016-10-10 11:27:36 -07004998 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4999 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5000 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5001 rc = setVideoHdrMode(mParameters, vhdr);
5002 if (rc != NO_ERROR) {
5003 LOGE("setVideoHDR is failed");
5004 }
5005 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005006
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005007 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005008 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005009 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005010 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5011 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5012 sensorModeFullFov)) {
5013 rc = BAD_VALUE;
5014 }
5015 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005016 //TODO: validate the arguments, HSV scenemode should have only the
5017 //advertised fps ranges
5018
5019 /*set the capture intent, hal version, tintless, stream info,
5020 *and DIS enable parameters to the backend*/
5021 LOGD("set_parms META_STREAM_INFO " );
5022 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005023 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5024 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005025 mStreamConfigInfo.type[i],
5026 mStreamConfigInfo.stream_sizes[i].width,
5027 mStreamConfigInfo.stream_sizes[i].height,
5028 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005029 mStreamConfigInfo.format[i],
5030 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005031 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032
Thierry Strudel3d639192016-09-09 11:52:26 -07005033 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5034 mParameters);
5035 if (rc < 0) {
5036 LOGE("set_parms failed for hal version, stream info");
5037 }
5038
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005039 cam_sensor_mode_info_t sensorModeInfo = {};
5040 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005041 if (rc != NO_ERROR) {
5042 LOGE("Failed to get sensor output size");
5043 pthread_mutex_unlock(&mMutex);
5044 goto error_exit;
5045 }
5046
5047 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5048 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005049 sensorModeInfo.active_array_size.width,
5050 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005051
5052 /* Set batchmode before initializing channel. Since registerBuffer
5053 * internally initializes some of the channels, better set batchmode
5054 * even before first register buffer */
5055 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5056 it != mStreamInfo.end(); it++) {
5057 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5058 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5059 && mBatchSize) {
5060 rc = channel->setBatchSize(mBatchSize);
5061 //Disable per frame map unmap for HFR/batchmode case
5062 rc |= channel->setPerFrameMapUnmap(false);
5063 if (NO_ERROR != rc) {
5064 LOGE("Channel init failed %d", rc);
5065 pthread_mutex_unlock(&mMutex);
5066 goto error_exit;
5067 }
5068 }
5069 }
5070
5071 //First initialize all streams
5072 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5073 it != mStreamInfo.end(); it++) {
5074 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005075
5076 /* Initial value of NR mode is needed before stream on */
5077 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005078 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5079 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005080 setEis) {
5081 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5082 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5083 is_type = mStreamConfigInfo.is_type[i];
5084 break;
5085 }
5086 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005087 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005088 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005089 rc = channel->initialize(IS_TYPE_NONE);
5090 }
5091 if (NO_ERROR != rc) {
5092 LOGE("Channel initialization failed %d", rc);
5093 pthread_mutex_unlock(&mMutex);
5094 goto error_exit;
5095 }
5096 }
5097
5098 if (mRawDumpChannel) {
5099 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5100 if (rc != NO_ERROR) {
5101 LOGE("Error: Raw Dump Channel init failed");
5102 pthread_mutex_unlock(&mMutex);
5103 goto error_exit;
5104 }
5105 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005106 if (mHdrPlusRawSrcChannel) {
5107 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5108 if (rc != NO_ERROR) {
5109 LOGE("Error: HDR+ RAW Source Channel init failed");
5110 pthread_mutex_unlock(&mMutex);
5111 goto error_exit;
5112 }
5113 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 if (mSupportChannel) {
5115 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5116 if (rc < 0) {
5117 LOGE("Support channel initialization failed");
5118 pthread_mutex_unlock(&mMutex);
5119 goto error_exit;
5120 }
5121 }
5122 if (mAnalysisChannel) {
5123 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5124 if (rc < 0) {
5125 LOGE("Analysis channel initialization failed");
5126 pthread_mutex_unlock(&mMutex);
5127 goto error_exit;
5128 }
5129 }
5130 if (mDummyBatchChannel) {
5131 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5132 if (rc < 0) {
5133 LOGE("mDummyBatchChannel setBatchSize failed");
5134 pthread_mutex_unlock(&mMutex);
5135 goto error_exit;
5136 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005137 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005138 if (rc < 0) {
5139 LOGE("mDummyBatchChannel initialization failed");
5140 pthread_mutex_unlock(&mMutex);
5141 goto error_exit;
5142 }
5143 }
5144
5145 // Set bundle info
5146 rc = setBundleInfo();
5147 if (rc < 0) {
5148 LOGE("setBundleInfo failed %d", rc);
5149 pthread_mutex_unlock(&mMutex);
5150 goto error_exit;
5151 }
5152
5153 //update settings from app here
5154 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5155 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5156 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5157 }
5158 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5159 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5160 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5161 }
5162 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5163 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5164 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5165
5166 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5167 (mLinkedCameraId != mCameraId) ) {
5168 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5169 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005170 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 goto error_exit;
5172 }
5173 }
5174
5175 // add bundle related cameras
5176 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5177 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005178 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5179 &m_pDualCamCmdPtr->bundle_info;
5180 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005181 if (mIsDeviceLinked)
5182 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5183 else
5184 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5185
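            // gCamLock guards the global session ID table shared across camera instances.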
5186 pthread_mutex_lock(&gCamLock);
5187
5188 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5189 LOGE("Dualcam: Invalid Session Id ");
5190 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005191 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005192 goto error_exit;
5193 }
5194
5195 if (mIsMainCamera == 1) {
5196 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5197 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005198 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005199 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005200 // related session id should be session id of linked session
5201 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5202 } else {
5203 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5204 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005205 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005206 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005207 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5208 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005209 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 pthread_mutex_unlock(&gCamLock);
5211
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005212 rc = mCameraHandle->ops->set_dual_cam_cmd(
5213 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 if (rc < 0) {
5215 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005216 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 goto error_exit;
5218 }
5219 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005220 goto no_error;
5221error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005222 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 return rc;
5224no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 mWokenUpByDaemon = false;
5226 mPendingLiveRequest = 0;
5227 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 }
5229
5230 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005231 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232
5233 if (mFlushPerf) {
5234 //we cannot accept any requests during flush
5235 LOGE("process_capture_request cannot proceed during flush");
5236 pthread_mutex_unlock(&mMutex);
5237 return NO_ERROR; //should return an error
5238 }
5239
5240 if (meta.exists(ANDROID_REQUEST_ID)) {
5241 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5242 mCurrentRequestId = request_id;
5243 LOGD("Received request with id: %d", request_id);
5244 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
        LOGE("Unable to find request id field, "
                "& no previous id available");
5247 pthread_mutex_unlock(&mMutex);
5248 return NAME_NOT_FOUND;
5249 } else {
5250 LOGD("Re-using old request id");
5251 request_id = mCurrentRequestId;
5252 }
5253
5254 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5255 request->num_output_buffers,
5256 request->input_buffer,
5257 frameNumber);
5258 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005259 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005260 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005261 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 uint32_t snapshotStreamId = 0;
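    // First pass over the output buffers: note blob (JPEG) requests, wait on
    // acquire fences, skip depth buffers, and collect the stream IDs requested
    // in this capture.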
5263 for (size_t i = 0; i < request->num_output_buffers; i++) {
5264 const camera3_stream_buffer_t& output = request->output_buffers[i];
5265 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5266
Emilian Peev7650c122017-01-19 08:24:33 -08005267 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5268 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005269 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005270 blob_request = 1;
5271 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5272 }
5273
5274 if (output.acquire_fence != -1) {
5275 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5276 close(output.acquire_fence);
5277 if (rc != OK) {
5278 LOGE("sync wait failed %d", rc);
5279 pthread_mutex_unlock(&mMutex);
5280 return rc;
5281 }
5282 }
5283
Emilian Peev0f3c3162017-03-15 12:57:46 +00005284 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5285 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005286 depthRequestPresent = true;
5287 continue;
5288 }
5289
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005290 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005291 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005292
5293 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5294 isVidBufRequested = true;
5295 }
5296 }
5297
    //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5299 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5300 itr++) {
5301 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5302 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5303 channel->getStreamID(channel->getStreamTypeMask());
5304
5305 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5306 isVidBufRequested = true;
5307 }
5308 }
5309
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005311 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005312 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 }
5314 if (blob_request && mRawDumpChannel) {
5315 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005316 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005317 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
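        // CAM_FREERUN_IDX lets the backend pick any available buffer for this
        // stream instead of pinning a specific buffer index.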
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005318 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005319 }
5320
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005321 {
5322 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5323 // Request a RAW buffer if
5324 // 1. mHdrPlusRawSrcChannel is valid.
5325 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate).
5327 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5328 mHdrPlusPendingRequests.size() == 0) {
5329 streamsArray.stream_request[streamsArray.num_streams].streamID =
5330 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5331 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5332 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005333 }
5334
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005335 //extract capture intent
5336 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5337 mCaptureIntent =
5338 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5339 }
5340
5341 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5342 mCacMode =
5343 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5344 }
5345
5346 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005347 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005348
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005349 {
5350 Mutex::Autolock l(gHdrPlusClientLock);
5351 // If this request has a still capture intent, try to submit an HDR+ request.
5352 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5353 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5354 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5355 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005356 }
5357
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005358 if (hdrPlusRequest) {
5359 // For a HDR+ request, just set the frame parameters.
5360 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5361 if (rc < 0) {
5362 LOGE("fail to set frame parameters");
5363 pthread_mutex_unlock(&mMutex);
5364 return rc;
5365 }
5366 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005367 /* Parse the settings:
5368 * - For every request in NORMAL MODE
5369 * - For every request in HFR mode during preview only case
5370 * - For first request of every batch in HFR mode during video
5371 * recording. In batchmode the same settings except frame number is
5372 * repeated in each request of the batch.
5373 */
5374 if (!mBatchSize ||
5375 (mBatchSize && !isVidBufRequested) ||
5376 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005377 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005378 if (rc < 0) {
5379 LOGE("fail to set frame parameters");
5380 pthread_mutex_unlock(&mMutex);
5381 return rc;
5382 }
5383 }
        /* For batch mode HFR, setFrameParameters is not called for every
         * request; only the frame number of the latest request is parsed.
         * Keep track of the first and last frame numbers in a batch so that
         * metadata for all frame numbers of the batch can be duplicated in
         * handleBatchMetadata */
5389 if (mBatchSize) {
5390 if (!mToBeQueuedVidBufs) {
5391 //start of the batch
5392 mFirstFrameNumberInBatch = request->frame_number;
5393 }
5394 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5395 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5396 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005397 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005398 return BAD_VALUE;
5399 }
5400 }
5401 if (mNeedSensorRestart) {
5402 /* Unlock the mutex as restartSensor waits on the channels to be
5403 * stopped, which in turn calls stream callback functions -
5404 * handleBufferWithLock and handleMetadataWithLock */
5405 pthread_mutex_unlock(&mMutex);
5406 rc = dynamicUpdateMetaStreamInfo();
5407 if (rc != NO_ERROR) {
5408 LOGE("Restarting the sensor failed");
5409 return BAD_VALUE;
5410 }
5411 mNeedSensorRestart = false;
5412 pthread_mutex_lock(&mMutex);
5413 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005414 if(mResetInstantAEC) {
5415 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5416 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5417 mResetInstantAEC = false;
5418 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005419 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005420 if (request->input_buffer->acquire_fence != -1) {
5421 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5422 close(request->input_buffer->acquire_fence);
5423 if (rc != OK) {
5424 LOGE("input buffer sync wait failed %d", rc);
5425 pthread_mutex_unlock(&mMutex);
5426 return rc;
5427 }
5428 }
5429 }
5430
5431 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5432 mLastCustIntentFrmNum = frameNumber;
5433 }
5434 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005435 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005436 pendingRequestIterator latestRequest;
5437 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005438 pendingRequest.num_buffers = depthRequestPresent ?
5439 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005440 pendingRequest.request_id = request_id;
5441 pendingRequest.blob_request = blob_request;
5442 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005443 if (request->input_buffer) {
5444 pendingRequest.input_buffer =
5445 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5446 *(pendingRequest.input_buffer) = *(request->input_buffer);
5447 pInputBuffer = pendingRequest.input_buffer;
5448 } else {
5449 pendingRequest.input_buffer = NULL;
5450 pInputBuffer = NULL;
5451 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005452 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005453
5454 pendingRequest.pipeline_depth = 0;
5455 pendingRequest.partial_result_cnt = 0;
5456 extractJpegMetadata(mCurJpegMeta, request);
5457 pendingRequest.jpegMetadata = mCurJpegMeta;
5458 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005459 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005460 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5461 mHybridAeEnable =
5462 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5463 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005464
5465 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5466 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005467 /* DevCamDebug metadata processCaptureRequest */
5468 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5469 mDevCamDebugMetaEnable =
5470 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5471 }
5472 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5473 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005474
5475 //extract CAC info
5476 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5477 mCacMode =
5478 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5479 }
5480 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005481 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005482
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005483 // extract enableZsl info
5484 if (gExposeEnableZslKey) {
5485 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5486 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5487 mZslEnabled = pendingRequest.enableZsl;
5488 } else {
5489 pendingRequest.enableZsl = mZslEnabled;
5490 }
5491 }
5492
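    // Track the output buffers of this request until they are returned to the framework.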
Thierry Strudel3d639192016-09-09 11:52:26 -07005493 PendingBuffersInRequest bufsForCurRequest;
5494 bufsForCurRequest.frame_number = frameNumber;
5495 // Mark current timestamp for the new request
5496 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005497 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005498
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005499 if (hdrPlusRequest) {
5500 // Save settings for this request.
5501 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5502 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5503
5504 // Add to pending HDR+ request queue.
5505 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5506 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5507
5508 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5509 }
5510
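    // Register each output buffer (except depth blobs, which the depth channel
    // handles separately) in the per-request and global pending buffer lists.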
Thierry Strudel3d639192016-09-09 11:52:26 -07005511 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005512 if ((request->output_buffers[i].stream->data_space ==
5513 HAL_DATASPACE_DEPTH) &&
5514 (HAL_PIXEL_FORMAT_BLOB ==
5515 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005516 continue;
5517 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005518 RequestedBufferInfo requestedBuf;
5519 memset(&requestedBuf, 0, sizeof(requestedBuf));
5520 requestedBuf.stream = request->output_buffers[i].stream;
5521 requestedBuf.buffer = NULL;
5522 pendingRequest.buffers.push_back(requestedBuf);
5523
5524 // Add to buffer handle the pending buffers list
5525 PendingBufferInfo bufferInfo;
5526 bufferInfo.buffer = request->output_buffers[i].buffer;
5527 bufferInfo.stream = request->output_buffers[i].stream;
5528 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5529 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5530 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5531 frameNumber, bufferInfo.buffer,
5532 channel->getStreamTypeMask(), bufferInfo.stream->format);
5533 }
5534 // Add this request packet into mPendingBuffersMap
5535 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5536 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5537 mPendingBuffersMap.get_num_overall_buffers());
5538
5539 latestRequest = mPendingRequestsList.insert(
5540 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005541
5542 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5543 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005544 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005545 for (size_t i = 0; i < request->num_output_buffers; i++) {
5546 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5547 }
5548
Thierry Strudel3d639192016-09-09 11:52:26 -07005549 if(mFlush) {
5550 LOGI("mFlush is true");
5551 pthread_mutex_unlock(&mMutex);
5552 return NO_ERROR;
5553 }
5554
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5556 // channel.
5557 if (!hdrPlusRequest) {
5558 int indexUsed;
        // Notify the metadata channel that a request has been received
5560 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005561
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005562 if(request->input_buffer != NULL){
5563 LOGD("Input request, frame_number %d", frameNumber);
5564 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5565 if (NO_ERROR != rc) {
5566 LOGE("fail to set reproc parameters");
5567 pthread_mutex_unlock(&mMutex);
5568 return rc;
5569 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005570 }
5571
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005572 // Call request on other streams
5573 uint32_t streams_need_metadata = 0;
5574 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5575 for (size_t i = 0; i < request->num_output_buffers; i++) {
5576 const camera3_stream_buffer_t& output = request->output_buffers[i];
5577 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5578
5579 if (channel == NULL) {
5580 LOGW("invalid channel pointer for stream");
5581 continue;
5582 }
5583
5584 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5585 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5586 output.buffer, request->input_buffer, frameNumber);
5587 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005588 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005589 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5590 if (rc < 0) {
5591 LOGE("Fail to request on picture channel");
5592 pthread_mutex_unlock(&mMutex);
5593 return rc;
5594 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005595 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005596 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5597 assert(NULL != mDepthChannel);
5598 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005599
Emilian Peev7650c122017-01-19 08:24:33 -08005600 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5601 if (rc < 0) {
5602 LOGE("Fail to map on depth buffer");
5603 pthread_mutex_unlock(&mMutex);
5604 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005605 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005606 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005607 } else {
5608 LOGD("snapshot request with buffer %p, frame_number %d",
5609 output.buffer, frameNumber);
5610 if (!request->settings) {
5611 rc = channel->request(output.buffer, frameNumber,
5612 NULL, mPrevParameters, indexUsed);
5613 } else {
5614 rc = channel->request(output.buffer, frameNumber,
5615 NULL, mParameters, indexUsed);
5616 }
5617 if (rc < 0) {
5618 LOGE("Fail to request on picture channel");
5619 pthread_mutex_unlock(&mMutex);
5620 return rc;
5621 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005622
Emilian Peev7650c122017-01-19 08:24:33 -08005623 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5624 uint32_t j = 0;
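                        // Record which buffer index the backend should use for
                        // this stream (free-run index in constrained HFR mode).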
5625 for (j = 0; j < streamsArray.num_streams; j++) {
5626 if (streamsArray.stream_request[j].streamID == streamId) {
5627 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5628 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5629 else
5630 streamsArray.stream_request[j].buf_index = indexUsed;
5631 break;
5632 }
5633 }
5634 if (j == streamsArray.num_streams) {
5635 LOGE("Did not find matching stream to update index");
5636 assert(0);
5637 }
5638
5639 pendingBufferIter->need_metadata = true;
5640 streams_need_metadata++;
5641 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005642 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5644 bool needMetadata = false;
5645 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5646 rc = yuvChannel->request(output.buffer, frameNumber,
5647 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5648 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005649 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005650 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005651 pthread_mutex_unlock(&mMutex);
5652 return rc;
5653 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005654
5655 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5656 uint32_t j = 0;
5657 for (j = 0; j < streamsArray.num_streams; j++) {
5658 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005659 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5660 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5661 else
5662 streamsArray.stream_request[j].buf_index = indexUsed;
5663 break;
5664 }
5665 }
5666 if (j == streamsArray.num_streams) {
5667 LOGE("Did not find matching stream to update index");
5668 assert(0);
5669 }
5670
5671 pendingBufferIter->need_metadata = needMetadata;
5672 if (needMetadata)
5673 streams_need_metadata += 1;
5674 LOGD("calling YUV channel request, need_metadata is %d",
5675 needMetadata);
5676 } else {
5677 LOGD("request with buffer %p, frame_number %d",
5678 output.buffer, frameNumber);
5679
5680 rc = channel->request(output.buffer, frameNumber, indexUsed);
5681
5682 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5683 uint32_t j = 0;
5684 for (j = 0; j < streamsArray.num_streams; j++) {
5685 if (streamsArray.stream_request[j].streamID == streamId) {
5686 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5687 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5688 else
5689 streamsArray.stream_request[j].buf_index = indexUsed;
5690 break;
5691 }
5692 }
5693 if (j == streamsArray.num_streams) {
5694 LOGE("Did not find matching stream to update index");
5695 assert(0);
5696 }
5697
5698 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5699 && mBatchSize) {
5700 mToBeQueuedVidBufs++;
5701 if (mToBeQueuedVidBufs == mBatchSize) {
5702 channel->queueBatchBuf();
5703 }
5704 }
5705 if (rc < 0) {
5706 LOGE("request failed");
5707 pthread_mutex_unlock(&mMutex);
5708 return rc;
5709 }
5710 }
5711 pendingBufferIter++;
5712 }
5713
5714 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5715 itr++) {
5716 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5717
5718 if (channel == NULL) {
5719 LOGE("invalid channel pointer for stream");
5720 assert(0);
5721 return BAD_VALUE;
5722 }
5723
5724 InternalRequest requestedStream;
5725 requestedStream = (*itr);
5726
5727
5728 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5729 LOGD("snapshot request internally input buffer %p, frame_number %d",
5730 request->input_buffer, frameNumber);
5731 if(request->input_buffer != NULL){
5732 rc = channel->request(NULL, frameNumber,
5733 pInputBuffer, &mReprocMeta, indexUsed, true,
5734 requestedStream.meteringOnly);
5735 if (rc < 0) {
5736 LOGE("Fail to request on picture channel");
5737 pthread_mutex_unlock(&mMutex);
5738 return rc;
5739 }
5740 } else {
5741 LOGD("snapshot request with frame_number %d", frameNumber);
5742 if (!request->settings) {
5743 rc = channel->request(NULL, frameNumber,
5744 NULL, mPrevParameters, indexUsed, true,
5745 requestedStream.meteringOnly);
5746 } else {
5747 rc = channel->request(NULL, frameNumber,
5748 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5749 }
5750 if (rc < 0) {
5751 LOGE("Fail to request on picture channel");
5752 pthread_mutex_unlock(&mMutex);
5753 return rc;
5754 }
5755
5756 if ((*itr).meteringOnly != 1) {
5757 requestedStream.need_metadata = 1;
5758 streams_need_metadata++;
5759 }
5760 }
5761
5762 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5763 uint32_t j = 0;
5764 for (j = 0; j < streamsArray.num_streams; j++) {
5765 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005766 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5767 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5768 else
5769 streamsArray.stream_request[j].buf_index = indexUsed;
5770 break;
5771 }
5772 }
5773 if (j == streamsArray.num_streams) {
5774 LOGE("Did not find matching stream to update index");
5775 assert(0);
5776 }
5777
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005778 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005779 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005780 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005781 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005782 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005783 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005784 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005785
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005786 //If 2 streams have need_metadata set to true, fail the request, unless
5787 //we copy/reference count the metadata buffer
5788 if (streams_need_metadata > 1) {
            LOGE("not supporting a request in which two streams require"
                    " HAL metadata for reprocessing");
5791 pthread_mutex_unlock(&mMutex);
5792 return -EINVAL;
5793 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005794
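        // Decide how sensor phase-detection (PD) data is handled for this request:
        // disabled when there is no depth channel, skipped by default when one
        // exists, and enabled only when the app requests PD data for a depth buffer.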
Emilian Peev656e4fa2017-06-02 16:47:04 +01005795 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5796 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5797 if (depthRequestPresent && mDepthChannel) {
5798 if (request->settings) {
5799 camera_metadata_ro_entry entry;
5800 if (find_camera_metadata_ro_entry(request->settings,
5801 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5802 if (entry.data.u8[0]) {
5803 pdafEnable = CAM_PD_DATA_ENABLED;
5804 } else {
5805 pdafEnable = CAM_PD_DATA_SKIP;
5806 }
5807 mDepthCloudMode = pdafEnable;
5808 } else {
5809 pdafEnable = mDepthCloudMode;
5810 }
5811 } else {
5812 pdafEnable = mDepthCloudMode;
5813 }
5814 }
5815
Emilian Peev7650c122017-01-19 08:24:33 -08005816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5817 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5818 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5819 pthread_mutex_unlock(&mMutex);
5820 return BAD_VALUE;
5821 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005822
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005823 if (request->input_buffer == NULL) {
5824 /* Set the parameters to backend:
5825 * - For every request in NORMAL MODE
5826 * - For every request in HFR mode during preview only case
5827 * - Once every batch in HFR mode during video recording
5828 */
5829 if (!mBatchSize ||
5830 (mBatchSize && !isVidBufRequested) ||
5831 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5832 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5833 mBatchSize, isVidBufRequested,
5834 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005835
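                // A full batch is ready: merge the streams of every request in
                // the batch (without duplicate stream IDs) so one set_parms call
                // covers the whole batch.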
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005836 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5837 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5838 uint32_t m = 0;
5839 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5840 if (streamsArray.stream_request[k].streamID ==
5841 mBatchedStreamsArray.stream_request[m].streamID)
5842 break;
5843 }
5844 if (m == mBatchedStreamsArray.num_streams) {
                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams]
                                    .streamID = streamsArray.stream_request[k].streamID;
                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams]
                                    .buf_index = streamsArray.stream_request[k].buf_index;
                            mBatchedStreamsArray.num_streams++;
5853 }
5854 }
5855 streamsArray = mBatchedStreamsArray;
5856 }
5857 /* Update stream id of all the requested buffers */
5858 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5859 streamsArray)) {
5860 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005861 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005862 return BAD_VALUE;
5863 }
5864
5865 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5866 mParameters);
5867 if (rc < 0) {
5868 LOGE("set_parms failed");
5869 }
                /* Reset to zero because the batch has been queued */
5871 mToBeQueuedVidBufs = 0;
5872 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5873 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5874 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
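                // Batch not yet full: just accumulate this request's streams in
                // mBatchedStreamsArray; parameters are sent once the batch completes.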
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005875 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5876 uint32_t m = 0;
5877 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5878 if (streamsArray.stream_request[k].streamID ==
5879 mBatchedStreamsArray.stream_request[m].streamID)
5880 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005881 }
5882 if (m == mBatchedStreamsArray.num_streams) {
5883 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5884 streamID = streamsArray.stream_request[k].streamID;
5885 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5886 buf_index = streamsArray.stream_request[k].buf_index;
5887 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5888 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005889 }
5890 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005892
5893 // Start all streams after the first setting is sent, so that the
5894 // setting can be applied sooner: (0 + apply_delay)th frame.
5895 if (mState == CONFIGURED && mChannelHandle) {
5896 //Then start them.
5897 LOGH("Start META Channel");
5898 rc = mMetadataChannel->start();
5899 if (rc < 0) {
5900 LOGE("META channel start failed");
5901 pthread_mutex_unlock(&mMutex);
5902 return rc;
5903 }
5904
5905 if (mAnalysisChannel) {
5906 rc = mAnalysisChannel->start();
5907 if (rc < 0) {
5908 LOGE("Analysis channel start failed");
5909 mMetadataChannel->stop();
5910 pthread_mutex_unlock(&mMutex);
5911 return rc;
5912 }
5913 }
5914
5915 if (mSupportChannel) {
5916 rc = mSupportChannel->start();
5917 if (rc < 0) {
5918 LOGE("Support channel start failed");
5919 mMetadataChannel->stop();
                        /* Although support and analysis are mutually exclusive today,
                           adding this in any case for future proofing */
5922 if (mAnalysisChannel) {
5923 mAnalysisChannel->stop();
5924 }
5925 pthread_mutex_unlock(&mMutex);
5926 return rc;
5927 }
5928 }
5929 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5930 it != mStreamInfo.end(); it++) {
5931 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5932 LOGH("Start Processing Channel mask=%d",
5933 channel->getStreamTypeMask());
5934 rc = channel->start();
5935 if (rc < 0) {
5936 LOGE("channel start failed");
5937 pthread_mutex_unlock(&mMutex);
5938 return rc;
5939 }
5940 }
5941
5942 if (mRawDumpChannel) {
5943 LOGD("Starting raw dump stream");
5944 rc = mRawDumpChannel->start();
5945 if (rc != NO_ERROR) {
5946 LOGE("Error Starting Raw Dump Channel");
5947 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5948 it != mStreamInfo.end(); it++) {
5949 QCamera3Channel *channel =
5950 (QCamera3Channel *)(*it)->stream->priv;
5951 LOGH("Stopping Processing Channel mask=%d",
5952 channel->getStreamTypeMask());
5953 channel->stop();
5954 }
5955 if (mSupportChannel)
5956 mSupportChannel->stop();
5957 if (mAnalysisChannel) {
5958 mAnalysisChannel->stop();
5959 }
5960 mMetadataChannel->stop();
5961 pthread_mutex_unlock(&mMutex);
5962 return rc;
5963 }
5964 }
5965
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005966 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005967 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005968 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005969 if (rc != NO_ERROR) {
5970 LOGE("start_channel failed %d", rc);
5971 pthread_mutex_unlock(&mMutex);
5972 return rc;
5973 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005974
5975 {
5976 // Configure Easel for stream on.
5977 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005978
5979 // Now that sensor mode should have been selected, get the selected sensor mode
5980 // info.
5981 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5982 getCurrentSensorModeInfo(mSensorModeInfo);
5983
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005984 if (EaselManagerClientOpened) {
5985 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07005986 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5987 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005988 if (rc != OK) {
5989 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5990 mCameraId, mSensorModeInfo.op_pixel_clk);
5991 pthread_mutex_unlock(&mMutex);
5992 return rc;
5993 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005994 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005995 }
5996 }
5997
5998 // Start sensor streaming.
5999 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6000 mChannelHandle);
6001 if (rc != NO_ERROR) {
6002 LOGE("start_sensor_stream_on failed %d", rc);
6003 pthread_mutex_unlock(&mMutex);
6004 return rc;
6005 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006006 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006007 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006008 }
6009
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006010 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07006011 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006012 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006013 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006014 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6015 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6016 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6017 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6018 rc = enableHdrPlusModeLocked();
6019 if (rc != OK) {
6020 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6021 pthread_mutex_unlock(&mMutex);
6022 return rc;
6023 }
6024
6025 mFirstPreviewIntentSeen = true;
6026 }
6027 }
6028
Thierry Strudel3d639192016-09-09 11:52:26 -07006029 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6030
6031 mState = STARTED;
    // Set up a timed condition wait so the request does not block indefinitely
6033 struct timespec ts;
6034 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006035 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006036 if (rc < 0) {
6037 isValidTimeout = 0;
        LOGE("Error reading the monotonic clock!!");
6039 }
6040 else {
        // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006042 int64_t timeout = 5;
6043 {
6044 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6045 // If there is a pending HDR+ request, the following requests may be blocked until the
6046 // HDR+ request is done. So allow a longer timeout.
6047 if (mHdrPlusPendingRequests.size() > 0) {
6048 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6049 }
6050 }
6051 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006052 }
    // Block until the number of in-flight requests drops below the minimum, or on timeout/error
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006054 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006055 (mState != ERROR) && (mState != DEINIT)) {
6056 if (!isValidTimeout) {
6057 LOGD("Blocking on conditional wait");
6058 pthread_cond_wait(&mRequestCond, &mMutex);
6059 }
6060 else {
6061 LOGD("Blocking on timed conditional wait");
6062 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6063 if (rc == ETIMEDOUT) {
6064 rc = -ENODEV;
6065 LOGE("Unblocked on timeout!!!!");
6066 break;
6067 }
6068 }
6069 LOGD("Unblocked");
6070 if (mWokenUpByDaemon) {
6071 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006072 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006073 break;
6074 }
6075 }
6076 pthread_mutex_unlock(&mMutex);
6077
6078 return rc;
6079}
6080
6081/*===========================================================================
6082 * FUNCTION : dump
6083 *
 * DESCRIPTION: Dumps HAL state (pending requests, pending buffers and the
 *              pending frame drop list) to the given file descriptor
 *
 * PARAMETERS :
 *  @ fd: file descriptor to write the dump output to
 *
 * RETURN : None
6090 *==========================================================================*/
6091void QCamera3HardwareInterface::dump(int fd)
6092{
6093 pthread_mutex_lock(&mMutex);
6094 dprintf(fd, "\n Camera HAL3 information Begin \n");
6095
6096 dprintf(fd, "\nNumber of pending requests: %zu \n",
6097 mPendingRequestsList.size());
6098 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6099 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6100 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6101 for(pendingRequestIterator i = mPendingRequestsList.begin();
6102 i != mPendingRequestsList.end(); i++) {
6103 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6104 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6105 i->input_buffer);
6106 }
6107 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6108 mPendingBuffersMap.get_num_overall_buffers());
6109 dprintf(fd, "-------+------------------\n");
6110 dprintf(fd, " Frame | Stream type mask \n");
6111 dprintf(fd, "-------+------------------\n");
6112 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6113 for(auto &j : req.mPendingBufferList) {
6114 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6115 dprintf(fd, " %5d | %11d \n",
6116 req.frame_number, channel->getStreamTypeMask());
6117 }
6118 }
6119 dprintf(fd, "-------+------------------\n");
6120
6121 dprintf(fd, "\nPending frame drop list: %zu\n",
6122 mPendingFrameDropList.size());
6123 dprintf(fd, "-------+-----------\n");
6124 dprintf(fd, " Frame | Stream ID \n");
6125 dprintf(fd, "-------+-----------\n");
6126 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6127 i != mPendingFrameDropList.end(); i++) {
6128 dprintf(fd, " %5d | %9d \n",
6129 i->frame_number, i->stream_ID);
6130 }
6131 dprintf(fd, "-------+-----------\n");
6132
6133 dprintf(fd, "\n Camera HAL3 information End \n");
6134
6135 /* use dumpsys media.camera as trigger to send update debug level event */
6136 mUpdateDebugLevel = true;
6137 pthread_mutex_unlock(&mMutex);
6138 return;
6139}
6140
6141/*===========================================================================
6142 * FUNCTION : flush
6143 *
6144 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6145 * conditionally restarts channels
6146 *
6147 * PARAMETERS :
6148 * @ restartChannels: re-start all channels
6149 *
6150 *
6151 * RETURN :
6152 * 0 on success
6153 * Error code on failure
6154 *==========================================================================*/
6155int QCamera3HardwareInterface::flush(bool restartChannels)
6156{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006157 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006158 int32_t rc = NO_ERROR;
6159
6160 LOGD("Unblocking Process Capture Request");
6161 pthread_mutex_lock(&mMutex);
6162 mFlush = true;
6163 pthread_mutex_unlock(&mMutex);
6164
6165 rc = stopAllChannels();
6166 // unlink of dualcam
6167 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006168 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6169 &m_pDualCamCmdPtr->bundle_info;
6170 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006171 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6172 pthread_mutex_lock(&gCamLock);
6173
6174 if (mIsMainCamera == 1) {
6175 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6176 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006177 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006178 // related session id should be session id of linked session
6179 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6180 } else {
6181 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6182 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006183 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006184 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6185 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006186 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006187 pthread_mutex_unlock(&gCamLock);
6188
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006189 rc = mCameraHandle->ops->set_dual_cam_cmd(
6190 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006191 if (rc < 0) {
6192 LOGE("Dualcam: Unlink failed, but still proceed to close");
6193 }
6194 }
6195
6196 if (rc < 0) {
6197 LOGE("stopAllChannels failed");
6198 return rc;
6199 }
6200 if (mChannelHandle) {
6201 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6202 mChannelHandle);
6203 }
6204
6205 // Reset bundle info
6206 rc = setBundleInfo();
6207 if (rc < 0) {
6208 LOGE("setBundleInfo failed %d", rc);
6209 return rc;
6210 }
6211
6212 // Mutex Lock
6213 pthread_mutex_lock(&mMutex);
6214
6215 // Unblock process_capture_request
6216 mPendingLiveRequest = 0;
6217 pthread_cond_signal(&mRequestCond);
6218
6219 rc = notifyErrorForPendingRequests();
6220 if (rc < 0) {
6221 LOGE("notifyErrorForPendingRequests failed");
6222 pthread_mutex_unlock(&mMutex);
6223 return rc;
6224 }
6225
6226 mFlush = false;
6227
6228 // Start the Streams/Channels
6229 if (restartChannels) {
6230 rc = startAllChannels();
6231 if (rc < 0) {
6232 LOGE("startAllChannels failed");
6233 pthread_mutex_unlock(&mMutex);
6234 return rc;
6235 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006236 if (mChannelHandle) {
            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006238 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006239 if (rc < 0) {
6240 LOGE("start_channel failed");
6241 pthread_mutex_unlock(&mMutex);
6242 return rc;
6243 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006244 }
6245 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006246 pthread_mutex_unlock(&mMutex);
6247
6248 return 0;
6249}
6250
6251/*===========================================================================
6252 * FUNCTION : flushPerf
6253 *
 * DESCRIPTION: This is the performance-optimized version of flush that does
 *              not stream off; instead it flushes the backend and waits for
 *              the pending buffers to be returned
6256 *
6257 * PARAMETERS :
6258 *
6259 *
6260 * RETURN : 0 : success
6261 * -EINVAL: input is malformed (device is not valid)
6262 * -ENODEV: if the device has encountered a serious error
6263 *==========================================================================*/
6264int QCamera3HardwareInterface::flushPerf()
6265{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006266 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006267 int32_t rc = 0;
6268 struct timespec timeout;
6269 bool timed_wait = false;
6270
6271 pthread_mutex_lock(&mMutex);
6272 mFlushPerf = true;
6273 mPendingBuffersMap.numPendingBufsAtFlush =
6274 mPendingBuffersMap.get_num_overall_buffers();
6275 LOGD("Calling flush. Wait for %d buffers to return",
6276 mPendingBuffersMap.numPendingBufsAtFlush);
6277
6278 /* send the flush event to the backend */
6279 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6280 if (rc < 0) {
6281 LOGE("Error in flush: IOCTL failure");
6282 mFlushPerf = false;
6283 pthread_mutex_unlock(&mMutex);
6284 return -ENODEV;
6285 }
6286
6287 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6288 LOGD("No pending buffers in HAL, return flush");
6289 mFlushPerf = false;
6290 pthread_mutex_unlock(&mMutex);
6291 return rc;
6292 }
6293
6294 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006295 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006296 if (rc < 0) {
        LOGE("Error reading the monotonic clock, cannot use timed wait");
6298 } else {
6299 timeout.tv_sec += FLUSH_TIMEOUT;
6300 timed_wait = true;
6301 }
6302
6303 //Block on conditional variable
6304 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6305 LOGD("Waiting on mBuffersCond");
6306 if (!timed_wait) {
6307 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6308 if (rc != 0) {
6309 LOGE("pthread_cond_wait failed due to rc = %s",
6310 strerror(rc));
6311 break;
6312 }
6313 } else {
6314 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6315 if (rc != 0) {
6316 LOGE("pthread_cond_timedwait failed due to rc = %s",
6317 strerror(rc));
6318 break;
6319 }
6320 }
6321 }
6322 if (rc != 0) {
6323 mFlushPerf = false;
6324 pthread_mutex_unlock(&mMutex);
6325 return -ENODEV;
6326 }
6327
6328 LOGD("Received buffers, now safe to return them");
6329
6330 //make sure the channels handle flush
6331 //currently only required for the picture channel to release snapshot resources
6332 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6333 it != mStreamInfo.end(); it++) {
6334 QCamera3Channel *channel = (*it)->channel;
6335 if (channel) {
6336 rc = channel->flush();
6337 if (rc) {
6338 LOGE("Flushing the channels failed with error %d", rc);
6339 // even though the channel flush failed we need to continue and
6340 // return the buffers we have to the framework, however the return
6341 // value will be an error
6342 rc = -ENODEV;
6343 }
6344 }
6345 }
6346
6347 /* notify the frameworks and send errored results */
6348 rc = notifyErrorForPendingRequests();
6349 if (rc < 0) {
6350 LOGE("notifyErrorForPendingRequests failed");
6351 pthread_mutex_unlock(&mMutex);
6352 return rc;
6353 }
6354
6355 //unblock process_capture_request
6356 mPendingLiveRequest = 0;
6357 unblockRequestIfNecessary();
6358
6359 mFlushPerf = false;
6360 pthread_mutex_unlock(&mMutex);
6361 LOGD ("Flush Operation complete. rc = %d", rc);
6362 return rc;
6363}
6364
6365/*===========================================================================
6366 * FUNCTION : handleCameraDeviceError
6367 *
6368 * DESCRIPTION: This function calls internal flush and notifies the error to
6369 * framework and updates the state variable.
6370 *
6371 * PARAMETERS : None
6372 *
6373 * RETURN : NO_ERROR on Success
6374 * Error code on failure
6375 *==========================================================================*/
6376int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6377{
6378 int32_t rc = NO_ERROR;
6379
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006380 {
6381 Mutex::Autolock lock(mFlushLock);
6382 pthread_mutex_lock(&mMutex);
6383 if (mState != ERROR) {
6384 //if mState != ERROR, nothing to be done
6385 pthread_mutex_unlock(&mMutex);
6386 return NO_ERROR;
6387 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006388 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006389
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006390 rc = flush(false /* restart channels */);
6391 if (NO_ERROR != rc) {
6392 LOGE("internal flush to handle mState = ERROR failed");
6393 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006394
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006395 pthread_mutex_lock(&mMutex);
6396 mState = DEINIT;
6397 pthread_mutex_unlock(&mMutex);
6398 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006399
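    // Report an unrecoverable device error (CAMERA3_MSG_ERROR_DEVICE) to the
    // framework; the framework is expected to close the camera device in response.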
6400 camera3_notify_msg_t notify_msg;
6401 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6402 notify_msg.type = CAMERA3_MSG_ERROR;
6403 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6404 notify_msg.message.error.error_stream = NULL;
6405 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006406 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006407
6408 return rc;
6409}
6410
6411/*===========================================================================
6412 * FUNCTION : captureResultCb
6413 *
6414 * DESCRIPTION: Callback handler for all capture result
6415 * (streams, as well as metadata)
6416 *
6417 * PARAMETERS :
6418 * @metadata : metadata information
 * @buffer : actual gralloc buffer to be returned to frameworks.
 *           NULL if metadata.
 * @frame_number : frame number of the corresponding capture request
 * @isInputBuffer : true if this callback is for an input (reprocess) buffer
6421 *
6422 * RETURN : NONE
6423 *==========================================================================*/
6424void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6425 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6426{
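    // Dispatch based on what was received: batched or non-batched metadata,
    // an input buffer completion, or an output stream buffer.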
6427 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006428 pthread_mutex_lock(&mMutex);
6429 uint8_t batchSize = mBatchSize;
6430 pthread_mutex_unlock(&mMutex);
6431 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006432 handleBatchMetadata(metadata_buf,
6433 true /* free_and_bufdone_meta_buf */);
6434 } else { /* mBatchSize = 0 */
6435 hdrPlusPerfLock(metadata_buf);
6436 pthread_mutex_lock(&mMutex);
6437 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006438 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006439 true /* last urgent frame of batch metadata */,
6440 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006441 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006442 pthread_mutex_unlock(&mMutex);
6443 }
6444 } else if (isInputBuffer) {
6445 pthread_mutex_lock(&mMutex);
6446 handleInputBufferWithLock(frame_number);
6447 pthread_mutex_unlock(&mMutex);
6448 } else {
6449 pthread_mutex_lock(&mMutex);
6450 handleBufferWithLock(buffer, frame_number);
6451 pthread_mutex_unlock(&mMutex);
6452 }
6453 return;
6454}
6455
6456/*===========================================================================
6457 * FUNCTION : getReprocessibleOutputStreamId
6458 *
6459 * DESCRIPTION: Get source output stream id for the input reprocess stream
6460 * based on size and format, which would be the largest
6461 * output stream if an input stream exists.
6462 *
6463 * PARAMETERS :
6464 * @id : return the stream id if found
6465 *
6466 * RETURN : int32_t type of status
 *              non-zero failure code
6468 * none-zero failure code
6469 *==========================================================================*/
6470int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6471{
6472 /* check if any output or bidirectional stream with the same size and format
6473 and return that stream */
6474 if ((mInputStreamInfo.dim.width > 0) &&
6475 (mInputStreamInfo.dim.height > 0)) {
6476 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6477 it != mStreamInfo.end(); it++) {
6478
6479 camera3_stream_t *stream = (*it)->stream;
6480 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6481 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6482 (stream->format == mInputStreamInfo.format)) {
6483 // Usage flag for an input stream and the source output stream
6484 // may be different.
6485 LOGD("Found reprocessible output stream! %p", *it);
6486 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6487 stream->usage, mInputStreamInfo.usage);
6488
6489 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6490 if (channel != NULL && channel->mStreams[0]) {
6491 id = channel->mStreams[0]->getMyServerID();
6492 return NO_ERROR;
6493 }
6494 }
6495 }
6496 } else {
6497 LOGD("No input stream, so no reprocessible output stream");
6498 }
6499 return NAME_NOT_FOUND;
6500}
6501
6502/*===========================================================================
6503 * FUNCTION : lookupFwkName
6504 *
 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 *              make sure the parameter is correctly propagated
6507 *
6508 * PARAMETERS :
6509 * @arr : map between the two enums
6510 * @len : len of the map
6511 * @hal_name : name of the hal_parm to map
6512 *
6513 * RETURN : int type of status
6514 * fwk_name -- success
 *              non-zero failure code
6516 *==========================================================================*/
6517template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6518 size_t len, halType hal_name)
6519{
6520
6521 for (size_t i = 0; i < len; i++) {
6522 if (arr[i].hal_name == hal_name) {
6523 return arr[i].fwk_name;
6524 }
6525 }
6526
6527 /* Not able to find matching framework type is not necessarily
6528 * an error case. This happens when mm-camera supports more attributes
6529 * than the frameworks do */
6530 LOGH("Cannot find matching framework type");
6531 return NAME_NOT_FOUND;
6532}
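
// Usage sketch for lookupFwkName (illustrative only; the map table name
// EFFECT_MODES_MAP and the METADATA_MAP_SIZE helper are assumed to be the ones
// defined elsewhere in this HAL):
//   int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffectMode);
//   if (fwkEffect != NAME_NOT_FOUND) {
//       // use the framework enum value
//   }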
6533
6534/*===========================================================================
6535 * FUNCTION : lookupHalName
6536 *
 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 *              make sure the parameter is correctly propagated
6539 *
6540 * PARAMETERS :
6541 * @arr : map between the two enums
6542 * @len : len of the map
 *   @fwk_name : name of the framework parameter to map
6544 *
6545 * RETURN : int32_t type of status
6546 * hal_name -- success
 *              non-zero failure code
6548 *==========================================================================*/
6549template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6550 size_t len, fwkType fwk_name)
6551{
6552 for (size_t i = 0; i < len; i++) {
6553 if (arr[i].fwk_name == fwk_name) {
6554 return arr[i].hal_name;
6555 }
6556 }
6557
6558 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6559 return NAME_NOT_FOUND;
6560}
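/* Sketch of the reverse mapping (illustrative; fwk_flashMode stands in for a
 * hypothetical framework value taken from a capture request):
 *     int hal_val = lookupHalName(FLASH_MODES_MAP,
 *             METADATA_MAP_SIZE(FLASH_MODES_MAP), fwk_flashMode);
 *     if (NAME_NOT_FOUND != hal_val) {
 *         // program the backend with hal_val
 *     }
 */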
6561
6562/*===========================================================================
6563 * FUNCTION : lookupProp
6564 *
6565 * DESCRIPTION: lookup a value by its name
6566 *
6567 * PARAMETERS :
6568 * @arr : map between the two enums
6569 * @len : size of the map
6570 * @name : name to be looked up
6571 *
6572 * RETURN : Value if found
6573 * CAM_CDS_MODE_MAX if not found
6574 *==========================================================================*/
6575template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6576 size_t len, const char *name)
6577{
6578 if (name) {
6579 for (size_t i = 0; i < len; i++) {
6580 if (!strcmp(arr[i].desc, name)) {
6581 return arr[i].val;
6582 }
6583 }
6584 }
6585 return CAM_CDS_MODE_MAX;
6586}
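/* Sketch (assumes a hypothetical CDS_MAP of {desc, val} entries and a string
 * value read from a system property):
 *     cam_cds_mode_type_t cds = lookupProp(CDS_MAP,
 *             METADATA_MAP_SIZE(CDS_MAP), prop_value);
 *     if (CAM_CDS_MODE_MAX != cds) {
 *         // apply the parsed CDS mode
 *     }
 */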
6587
6588/*===========================================================================
6589 * FUNCTION   : translateFromHalMetadata
6590 * DESCRIPTION: Translate the HAL metadata buffer into framework (camera_metadata_t) format
6591 *
6592 * PARAMETERS :
6593 * @metadata : metadata information from callback
6594 * @timestamp: metadata buffer timestamp
6595 * @request_id: request id
6596 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006597 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006598 * @DevCamDebug_meta_enable: enable DevCamDebug meta
Thierry Strudel3d639192016-09-09 11:52:26 -07006600 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006601 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6602 * in a batch. Always true for non-batch mode.
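 * @pipeline_depth: request pipeline depth reported to the framework
 * @capture_intent: capture intent of the request
 * @fwk_cacMode: framework color aberration correction (CAC) mode to report
 * @enableZsl: pointer to the requested ZSL enable setting, if provided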
Thierry Strudel3d639192016-09-09 11:52:26 -07006603 *
6604 * RETURN : camera_metadata_t*
6605 * metadata in a format specified by fwk
6606 *==========================================================================*/
6607camera_metadata_t*
6608QCamera3HardwareInterface::translateFromHalMetadata(
6609 metadata_buffer_t *metadata,
6610 nsecs_t timestamp,
6611 int32_t request_id,
6612 const CameraMetadata& jpegMetadata,
6613 uint8_t pipeline_depth,
6614 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006615 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006616 /* DevCamDebug metadata translateFromHalMetadata argument */
6617 uint8_t DevCamDebug_meta_enable,
6618 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006619 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006620 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006621 bool lastMetadataInBatch,
6622 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006623{
6624 CameraMetadata camMetadata;
6625 camera_metadata_t *resultMetadata;
6626
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006627 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006628 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6629 * Timestamp is needed because it's used for shutter notify calculation.
6630 * */
6631 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6632 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006633 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006634 }
6635
Thierry Strudel3d639192016-09-09 11:52:26 -07006636 if (jpegMetadata.entryCount())
6637 camMetadata.append(jpegMetadata);
6638
6639 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6640 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6641 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6642 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006643 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006644 if (mBatchSize == 0) {
6645 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6646 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6647 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006648
Samuel Ha68ba5172016-12-15 18:41:12 -08006649 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6650    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6651 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6652 // DevCamDebug metadata translateFromHalMetadata AF
6653 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6654 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6655 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6656 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6657 }
6658 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6659 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6660 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6661 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6662 }
6663 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6664 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6665 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6666 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6667 }
6668 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6669 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6670 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6671 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6672 }
6673 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6674 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6675 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6676 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6677 }
6678 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6679 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6680 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6681 *DevCamDebug_af_monitor_pdaf_target_pos;
6682 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6683 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6684 }
6685 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6686 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6687 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6688 *DevCamDebug_af_monitor_pdaf_confidence;
6689 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6690 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6691 }
6692 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6693 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6694 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6695 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6696 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6697 }
6698 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6699 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6700 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6701 *DevCamDebug_af_monitor_tof_target_pos;
6702 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6703 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6704 }
6705 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6706 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6707 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6708 *DevCamDebug_af_monitor_tof_confidence;
6709 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6710 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6711 }
6712 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6713 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6714 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6715 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6716 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6717 }
6718 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6719 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6720 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6721 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6722 &fwk_DevCamDebug_af_monitor_type_select, 1);
6723 }
6724 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6725 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6726 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6727 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6728 &fwk_DevCamDebug_af_monitor_refocus, 1);
6729 }
6730 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6731 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6732 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6733 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6734 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6735 }
6736 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6737 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6738 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6739 *DevCamDebug_af_search_pdaf_target_pos;
6740 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6741 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6742 }
6743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6744 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6745 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6746 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6747 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6750 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6751 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6752 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6753 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6754 }
6755 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6756 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6757 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6758 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6759 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6760 }
6761 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6762 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6763 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6764 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6765 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6766 }
6767 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6768 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6769 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6770 *DevCamDebug_af_search_tof_target_pos;
6771 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6772 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6773 }
6774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6775 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6776 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6777 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6778 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6779 }
6780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6781 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6782 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6783 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6784 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6785 }
6786 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6787 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6788 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6789 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6790 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6791 }
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6793 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6794 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6795 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6796 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6797 }
6798 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6799 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6800 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6801 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6802 &fwk_DevCamDebug_af_search_type_select, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6805 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6806 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6807 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6808 &fwk_DevCamDebug_af_search_next_pos, 1);
6809 }
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6811 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6812 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6813 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6814 &fwk_DevCamDebug_af_search_target_pos, 1);
6815 }
6816 // DevCamDebug metadata translateFromHalMetadata AEC
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6818 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6819 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6820 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6821 }
6822 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6823 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6824 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6825 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6826 }
6827 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6828 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6829 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6830 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6831 }
6832 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6833 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6834 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6835 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6836 }
6837 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6838 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6839 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6840 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6841 }
6842 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6843 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6844 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6845 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6846 }
6847 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6848 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6849 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6850 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6851 }
6852 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6853 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6854 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6855 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6856 }
Samuel Ha34229982017-02-17 13:51:11 -08006857 // DevCamDebug metadata translateFromHalMetadata zzHDR
6858 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6859 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6860 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6861 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6862 }
6863 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6864 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006865 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006866 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6867 }
6868 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6869 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6870 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6871 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6872 }
6873 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6874 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006875 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006876 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6877 }
6878 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6879 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6880 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6881 *DevCamDebug_aec_hdr_sensitivity_ratio;
6882 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6883 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6884 }
6885 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6886 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6887 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6888 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6889 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6890 }
6891 // DevCamDebug metadata translateFromHalMetadata ADRC
6892 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6893 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6894 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6895 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6896 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6897 }
6898 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6899 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6900 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6901 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6902 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6903 }
6904 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6905 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6906 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6907 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6908 }
6909 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6910 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6911 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6912 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6913 }
6914 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6915 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6916 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6917 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6918 }
6919 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6920 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6921 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6922 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6923 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006924 // DevCamDebug metadata translateFromHalMetadata AWB
6925 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6926 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6927 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6928 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6929 }
6930 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6931 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6932 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6933 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6934 }
6935 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6936 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6937 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6938 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6939 }
6940 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6941 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6942 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6943 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6944 }
6945 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6946 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6947 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6948 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6949 }
6950 }
6951 // atrace_end(ATRACE_TAG_ALWAYS);
6952
Thierry Strudel3d639192016-09-09 11:52:26 -07006953 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6954 int64_t fwk_frame_number = *frame_number;
6955 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6956 }
6957
6958 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6959 int32_t fps_range[2];
6960 fps_range[0] = (int32_t)float_range->min_fps;
6961 fps_range[1] = (int32_t)float_range->max_fps;
6962 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6963 fps_range, 2);
6964 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6965 fps_range[0], fps_range[1]);
6966 }
6967
6968 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6969 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6970 }
6971
6972 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6973        int val = lookupFwkName(SCENE_MODES_MAP,
6974 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6975 *sceneMode);
6976 if (NAME_NOT_FOUND != val) {
6977 uint8_t fwkSceneMode = (uint8_t)val;
6978 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6979 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6980 fwkSceneMode);
6981 }
6982 }
6983
6984 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6985 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6986 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6987 }
6988
6989 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6990 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6991 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6992 }
6993
6994 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6995 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6996 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6997 }
6998
6999 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7000 CAM_INTF_META_EDGE_MODE, metadata) {
7001 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7002 }
7003
7004 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7005 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7006 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7007 }
7008
7009 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7010 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7011 }
7012
7013 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7014 if (0 <= *flashState) {
7015 uint8_t fwk_flashState = (uint8_t) *flashState;
7016 if (!gCamCapability[mCameraId]->flash_available) {
7017 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7018 }
7019 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7020 }
7021 }
7022
7023 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7024 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7025 if (NAME_NOT_FOUND != val) {
7026 uint8_t fwk_flashMode = (uint8_t)val;
7027 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7028 }
7029 }
7030
7031 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7032 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7033 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7034 }
7035
7036 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7037 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7038 }
7039
7040 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7041 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7042 }
7043
7044 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7045 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7046 }
7047
7048 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7049 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7050 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7051 }
7052
7053 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7054 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7055 LOGD("fwk_videoStab = %d", fwk_videoStab);
7056 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7057 } else {
7058        // Regardless of whether video stabilization is supported or not, CTS expects the EIS
7059        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7060 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7061 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007062 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007063 }
7064
7065 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7066 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7067 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7068 }
7069
7070 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7071 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7072 }
7073
Thierry Strudel3d639192016-09-09 11:52:26 -07007074 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7075 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007076 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007077
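        // Reorder the per-channel dynamic black levels into RGGB order based on
        // the sensor's CFA arrangement (see adjustBlackLevelForCFA below).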
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007078 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7079 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007080
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007081 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007082 blackLevelAppliedPattern->cam_black_level[0],
7083 blackLevelAppliedPattern->cam_black_level[1],
7084 blackLevelAppliedPattern->cam_black_level[2],
7085 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007086 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7087 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007088
7089#ifndef USE_HAL_3_3
7090 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307091        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007092        // depth space (i.e. divide by 2^4 = 16).
Jason Lee4f3d96e2017-02-28 19:24:14 +05307093 fwk_blackLevelInd[0] /= 16.0;
7094 fwk_blackLevelInd[1] /= 16.0;
7095 fwk_blackLevelInd[2] /= 16.0;
7096 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007097 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7098 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007099#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007100 }
7101
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007102#ifndef USE_HAL_3_3
7103 // Fixed whitelevel is used by ISP/Sensor
7104 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7105 &gCamCapability[mCameraId]->white_level, 1);
7106#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007107
7108 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7109 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7110 int32_t scalerCropRegion[4];
7111 scalerCropRegion[0] = hScalerCropRegion->left;
7112 scalerCropRegion[1] = hScalerCropRegion->top;
7113 scalerCropRegion[2] = hScalerCropRegion->width;
7114 scalerCropRegion[3] = hScalerCropRegion->height;
7115
7116 // Adjust crop region from sensor output coordinate system to active
7117 // array coordinate system.
7118 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7119 scalerCropRegion[2], scalerCropRegion[3]);
7120
7121 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7122 }
7123
7124 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7125 LOGD("sensorExpTime = %lld", *sensorExpTime);
7126 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7127 }
7128
7129 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7130 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7131 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7132 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7133 }
7134
7135 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7136 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7137 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7138 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7139 sensorRollingShutterSkew, 1);
7140 }
7141
7142 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7143 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7144 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7145
7146 //calculate the noise profile based on sensitivity
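        // Entries are interleaved (S, O) pairs, one pair per color channel,
        // matching the Android noise model where variance ~= S * signal + O.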
7147 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7148 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7149 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7150 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7151 noise_profile[i] = noise_profile_S;
7152 noise_profile[i+1] = noise_profile_O;
7153 }
7154 LOGD("noise model entry (S, O) is (%f, %f)",
7155 noise_profile_S, noise_profile_O);
7156 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7157 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7158 }
7159
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007160#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007161 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007162 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007163 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007164 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007165 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7166 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7167 }
7168 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007169#endif
7170
Thierry Strudel3d639192016-09-09 11:52:26 -07007171 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7172 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7173 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7174 }
7175
7176 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7177 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7178 *faceDetectMode);
7179 if (NAME_NOT_FOUND != val) {
7180 uint8_t fwk_faceDetectMode = (uint8_t)val;
7181 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7182
7183 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7184 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7185 CAM_INTF_META_FACE_DETECTION, metadata) {
7186 uint8_t numFaces = MIN(
7187 faceDetectionInfo->num_faces_detected, MAX_ROI);
7188 int32_t faceIds[MAX_ROI];
7189 uint8_t faceScores[MAX_ROI];
7190 int32_t faceRectangles[MAX_ROI * 4];
7191 int32_t faceLandmarks[MAX_ROI * 6];
7192 size_t j = 0, k = 0;
7193
7194 for (size_t i = 0; i < numFaces; i++) {
7195 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7196                    // Adjust the face boundary from the sensor output coordinate
7197                    // system to the active array coordinate system.
7198 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7199 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7200 rect.width, rect.height);
7201
7202 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7203 faceRectangles+j, -1);
7204
Jason Lee8ce36fa2017-04-19 19:40:37 -07007205 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7206 "bottom-right (%d, %d)",
7207 faceDetectionInfo->frame_id, i,
7208 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7209 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7210
Thierry Strudel3d639192016-09-09 11:52:26 -07007211 j+= 4;
7212 }
7213 if (numFaces <= 0) {
7214 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7215 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7216 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7217 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7218 }
7219
7220 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7221 numFaces);
7222 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7223 faceRectangles, numFaces * 4U);
7224 if (fwk_faceDetectMode ==
7225 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7226 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7227 CAM_INTF_META_FACE_LANDMARK, metadata) {
7228
7229 for (size_t i = 0; i < numFaces; i++) {
7230                            // Map the landmark coordinates from the sensor output
7231                            // coordinate system to the active array coordinate system.
7232 mCropRegionMapper.toActiveArray(
7233 landmarks->face_landmarks[i].left_eye_center.x,
7234 landmarks->face_landmarks[i].left_eye_center.y);
7235 mCropRegionMapper.toActiveArray(
7236 landmarks->face_landmarks[i].right_eye_center.x,
7237 landmarks->face_landmarks[i].right_eye_center.y);
7238 mCropRegionMapper.toActiveArray(
7239 landmarks->face_landmarks[i].mouth_center.x,
7240 landmarks->face_landmarks[i].mouth_center.y);
7241
7242 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007243
7244 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7245 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7246 faceDetectionInfo->frame_id, i,
7247 faceLandmarks[k + LEFT_EYE_X],
7248 faceLandmarks[k + LEFT_EYE_Y],
7249 faceLandmarks[k + RIGHT_EYE_X],
7250 faceLandmarks[k + RIGHT_EYE_Y],
7251 faceLandmarks[k + MOUTH_X],
7252 faceLandmarks[k + MOUTH_Y]);
7253
Thierry Strudel04e026f2016-10-10 11:27:36 -07007254 k+= TOTAL_LANDMARK_INDICES;
7255 }
7256 } else {
7257 for (size_t i = 0; i < numFaces; i++) {
7258 setInvalidLandmarks(faceLandmarks+k);
7259 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007260 }
7261 }
7262
Jason Lee49619db2017-04-13 12:07:22 -07007263 for (size_t i = 0; i < numFaces; i++) {
7264 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7265
7266 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7267 faceDetectionInfo->frame_id, i, faceIds[i]);
7268 }
7269
Thierry Strudel3d639192016-09-09 11:52:26 -07007270 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7271 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7272 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007273 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007274 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7275 CAM_INTF_META_FACE_BLINK, metadata) {
7276 uint8_t detected[MAX_ROI];
7277 uint8_t degree[MAX_ROI * 2];
7278 for (size_t i = 0; i < numFaces; i++) {
7279 detected[i] = blinks->blink[i].blink_detected;
7280 degree[2 * i] = blinks->blink[i].left_blink;
7281 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007282
Jason Lee49619db2017-04-13 12:07:22 -07007283 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7284 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7285 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7286 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007287 }
7288 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7289 detected, numFaces);
7290 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7291 degree, numFaces * 2);
7292 }
7293 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7294 CAM_INTF_META_FACE_SMILE, metadata) {
7295 uint8_t degree[MAX_ROI];
7296 uint8_t confidence[MAX_ROI];
7297 for (size_t i = 0; i < numFaces; i++) {
7298 degree[i] = smiles->smile[i].smile_degree;
7299 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007300
Jason Lee49619db2017-04-13 12:07:22 -07007301 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7302 "smile_degree=%d, smile_score=%d",
7303 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007304 }
7305 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7306 degree, numFaces);
7307 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7308 confidence, numFaces);
7309 }
7310 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7311 CAM_INTF_META_FACE_GAZE, metadata) {
7312 int8_t angle[MAX_ROI];
7313 int32_t direction[MAX_ROI * 3];
7314 int8_t degree[MAX_ROI * 2];
7315 for (size_t i = 0; i < numFaces; i++) {
7316 angle[i] = gazes->gaze[i].gaze_angle;
7317 direction[3 * i] = gazes->gaze[i].updown_dir;
7318 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7319 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7320 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7321 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007322
7323 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7324 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7325 "left_right_gaze=%d, top_bottom_gaze=%d",
7326 faceDetectionInfo->frame_id, i, angle[i],
7327 direction[3 * i], direction[3 * i + 1],
7328 direction[3 * i + 2],
7329 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007330 }
7331 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7332 (uint8_t *)angle, numFaces);
7333 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7334 direction, numFaces * 3);
7335 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7336 (uint8_t *)degree, numFaces * 2);
7337 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007338 }
7339 }
7340 }
7341 }
7342
7343 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7344 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007345 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007346 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007347 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007348
Shuzhen Wang14415f52016-11-16 18:26:18 -08007349 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7350 histogramBins = *histBins;
7351 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7352 }
7353
7354 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007355 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7356 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007357 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007358
7359 switch (stats_data->type) {
7360 case CAM_HISTOGRAM_TYPE_BAYER:
7361 switch (stats_data->bayer_stats.data_type) {
7362 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007363 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7364 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007365 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007366 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7367 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007368 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007369 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7370 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007371 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007372 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007373 case CAM_STATS_CHANNEL_R:
7374 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007375 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7376 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007377 }
7378 break;
7379 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007380 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007381 break;
7382 }
7383
Shuzhen Wang14415f52016-11-16 18:26:18 -08007384 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007385 }
7386 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007387 }
7388
7389 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7390 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7391 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7392 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7393 }
7394
7395 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7396 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7397 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7398 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7399 }
7400
7401 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7402 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7403 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7404 CAM_MAX_SHADING_MAP_HEIGHT);
7405 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7406 CAM_MAX_SHADING_MAP_WIDTH);
7407 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7408 lensShadingMap->lens_shading, 4U * map_width * map_height);
7409 }
7410
7411 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7412 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7413 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7414 }
7415
7416 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7417 //Populate CAM_INTF_META_TONEMAP_CURVES
7418 /* ch0 = G, ch 1 = B, ch 2 = R*/
7419 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7420 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7421 tonemap->tonemap_points_cnt,
7422 CAM_MAX_TONEMAP_CURVE_SIZE);
7423 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7424 }
7425
7426 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7427 &tonemap->curves[0].tonemap_points[0][0],
7428 tonemap->tonemap_points_cnt * 2);
7429
7430 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7431 &tonemap->curves[1].tonemap_points[0][0],
7432 tonemap->tonemap_points_cnt * 2);
7433
7434 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7435 &tonemap->curves[2].tonemap_points[0][0],
7436 tonemap->tonemap_points_cnt * 2);
7437 }
7438
7439 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7440 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7441 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7442 CC_GAIN_MAX);
7443 }
7444
7445 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7446 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7447 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7448 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7449 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7450 }
7451
7452 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7453 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7454 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7455 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7456 toneCurve->tonemap_points_cnt,
7457 CAM_MAX_TONEMAP_CURVE_SIZE);
7458 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7459 }
7460 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7461 (float*)toneCurve->curve.tonemap_points,
7462 toneCurve->tonemap_points_cnt * 2);
7463 }
7464
7465 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7466 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7467 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7468 predColorCorrectionGains->gains, 4);
7469 }
7470
7471 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7472 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7473 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7474 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7475 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7476 }
7477
7478 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7479 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7480 }
7481
7482 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7483 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7484 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7485 }
7486
7487 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7488 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7489 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7490 }
7491
7492 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7493 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7494 *effectMode);
7495 if (NAME_NOT_FOUND != val) {
7496 uint8_t fwk_effectMode = (uint8_t)val;
7497 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7498 }
7499 }
7500
7501 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7502 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7503 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7504 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7505 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7506 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7507 }
7508 int32_t fwk_testPatternData[4];
7509 fwk_testPatternData[0] = testPatternData->r;
7510 fwk_testPatternData[3] = testPatternData->b;
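        // Map the HAL Gr/Gb samples onto the framework's two green slots
        // according to the sensor's CFA arrangement.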
7511 switch (gCamCapability[mCameraId]->color_arrangement) {
7512 case CAM_FILTER_ARRANGEMENT_RGGB:
7513 case CAM_FILTER_ARRANGEMENT_GRBG:
7514 fwk_testPatternData[1] = testPatternData->gr;
7515 fwk_testPatternData[2] = testPatternData->gb;
7516 break;
7517 case CAM_FILTER_ARRANGEMENT_GBRG:
7518 case CAM_FILTER_ARRANGEMENT_BGGR:
7519 fwk_testPatternData[2] = testPatternData->gr;
7520 fwk_testPatternData[1] = testPatternData->gb;
7521 break;
7522 default:
7523 LOGE("color arrangement %d is not supported",
7524 gCamCapability[mCameraId]->color_arrangement);
7525 break;
7526 }
7527 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7528 }
7529
7530 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7531 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7532 }
7533
7534 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7535 String8 str((const char *)gps_methods);
7536 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7537 }
7538
7539 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7540 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7541 }
7542
7543 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7544 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7545 }
7546
7547 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7548 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7549 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7550 }
7551
7552 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7553 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7554 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7555 }
7556
7557 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7558 int32_t fwk_thumb_size[2];
7559 fwk_thumb_size[0] = thumb_size->width;
7560 fwk_thumb_size[1] = thumb_size->height;
7561 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7562 }
7563
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007564 // Skip reprocess metadata if there is no input stream.
7565 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7566 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7567 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7568 privateData,
7569 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007571 }
7572
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007573 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007574 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007575 meteringMode, 1);
7576 }
7577
Thierry Strudel54dc9782017-02-15 12:12:10 -08007578 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7579 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7580 LOGD("hdr_scene_data: %d %f\n",
7581 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7582 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7583 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7584 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7585 &isHdr, 1);
7586 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7587 &isHdrConfidence, 1);
7588 }
7589
7590
7591
Thierry Strudel3d639192016-09-09 11:52:26 -07007592 if (metadata->is_tuning_params_valid) {
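        /* Blob layout, as packed below: six uint32_t header fields (data version
         * plus the sensor/VFE/CPP/CAC/mod3 payload sizes) followed by the sensor,
         * VFE, CPP and CAC payloads, each clamped to its TUNING_*_MAX limit. */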
7593 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7594 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7595 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7596
7597
7598 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7599 sizeof(uint32_t));
7600 data += sizeof(uint32_t);
7601
7602 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7603 sizeof(uint32_t));
7604 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7605 data += sizeof(uint32_t);
7606
7607 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7608 sizeof(uint32_t));
7609 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7610 data += sizeof(uint32_t);
7611
7612 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7613 sizeof(uint32_t));
7614 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7615 data += sizeof(uint32_t);
7616
7617 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7618 sizeof(uint32_t));
7619 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7620 data += sizeof(uint32_t);
7621
7622 metadata->tuning_params.tuning_mod3_data_size = 0;
7623 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7624 sizeof(uint32_t));
7625 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7626 data += sizeof(uint32_t);
7627
7628 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7629 TUNING_SENSOR_DATA_MAX);
7630 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7631 count);
7632 data += count;
7633
7634 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7635 TUNING_VFE_DATA_MAX);
7636 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7637 count);
7638 data += count;
7639
7640 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7641 TUNING_CPP_DATA_MAX);
7642 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7643 count);
7644 data += count;
7645
7646 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7647 TUNING_CAC_DATA_MAX);
7648 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7649 count);
7650 data += count;
7651
7652 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7653 (int32_t *)(void *)tuning_meta_data_blob,
7654 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7655 }
7656
7657 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7658 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7659 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7660 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7661 NEUTRAL_COL_POINTS);
7662 }
7663
7664 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7665 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7666 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7667 }
7668
7669 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7670 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7671 // Adjust crop region from sensor output coordinate system to active
7672 // array coordinate system.
7673 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7674 hAeRegions->rect.width, hAeRegions->rect.height);
7675
7676 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7677 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7678 REGIONS_TUPLE_COUNT);
7679 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7680 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7681 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7682 hAeRegions->rect.height);
7683 }
7684
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007685 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7686 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7687 if (NAME_NOT_FOUND != val) {
7688 uint8_t fwkAfMode = (uint8_t)val;
7689 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7690 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7691 } else {
7692 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7693 val);
7694 }
7695 }
7696
Thierry Strudel3d639192016-09-09 11:52:26 -07007697 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7698 uint8_t fwk_afState = (uint8_t) *afState;
7699 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007700 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007701 }
7702
7703 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7704 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7705 }
7706
7707 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7708 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7709 }
7710
7711 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7712 uint8_t fwk_lensState = *lensState;
7713 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7714 }
7715
Thierry Strudel3d639192016-09-09 11:52:26 -07007716
7717 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007718 uint32_t ab_mode = *hal_ab_mode;
7719 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7720 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7721 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7722 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007723 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007724 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007725 if (NAME_NOT_FOUND != val) {
7726 uint8_t fwk_ab_mode = (uint8_t)val;
7727 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7728 }
7729 }
7730
7731 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7732 int val = lookupFwkName(SCENE_MODES_MAP,
7733 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7734 if (NAME_NOT_FOUND != val) {
7735 uint8_t fwkBestshotMode = (uint8_t)val;
7736 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7737 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7738 } else {
7739 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7740 }
7741 }
7742
7743 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7744 uint8_t fwk_mode = (uint8_t) *mode;
7745 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7746 }
7747
7748    /* Constant metadata values to be updated */
7749 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7750 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7751
7752 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7753 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7754
7755 int32_t hotPixelMap[2];
7756 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7757
7758 // CDS
7759 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7760 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7761 }
7762
Thierry Strudel04e026f2016-10-10 11:27:36 -07007763 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7764 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007765 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007766 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7767 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7768 } else {
7769 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7770 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007771
7772 if(fwk_hdr != curr_hdr_state) {
7773 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7774 if(fwk_hdr)
7775 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7776 else
7777 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7778 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007779 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7780 }
7781
Thierry Strudel54dc9782017-02-15 12:12:10 -08007782 //binning correction
7783 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7784 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7785 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7786 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7787 }
7788
Thierry Strudel04e026f2016-10-10 11:27:36 -07007789 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007790 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007791 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7792 int8_t is_ir_on = 0;
7793
7794 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7795 if(is_ir_on != curr_ir_state) {
7796 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7797 if(is_ir_on)
7798 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7799 else
7800 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7801 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007802 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007803 }
7804
Thierry Strudel269c81a2016-10-12 12:13:59 -07007805 // AEC SPEED
7806 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7807 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7808 }
7809
7810 // AWB SPEED
7811 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7812 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7813 }
7814
Thierry Strudel3d639192016-09-09 11:52:26 -07007815 // TNR
7816 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7817 uint8_t tnr_enable = tnr->denoise_enable;
7818 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007819 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7820 int8_t is_tnr_on = 0;
7821
7822 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7823 if(is_tnr_on != curr_tnr_state) {
7824 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7825 if(is_tnr_on)
7826 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7827 else
7828 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007830
7831 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7832 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7833 }
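    /* Note: the video HDR, IR and TNR blocks above share one profiling pattern --
     * compare the new on/off state against the bit cached in mCurrFeatureState,
     * log a PROFILE_META_*_TOGGLED message only on a transition, then update the
     * cached bit. Illustrative sketch of the pattern (FEATURE_BIT is a
     * placeholder, not a real flag in this HAL):
     *
     *     int8_t prev = ((mCurrFeatureState & FEATURE_BIT) != 0);
     *     int8_t curr = (new_value > 0) ? 1 : 0;
     *     if (curr != prev) {
     *         LOGH("PROFILE_META_..._TOGGLED value=%d", new_value);
     *         if (curr)
     *             mCurrFeatureState |= FEATURE_BIT;
     *         else
     *             mCurrFeatureState &= ~FEATURE_BIT;
     *     }
     */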
7834
7835 // Reprocess crop data
7836 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7837 uint8_t cnt = crop_data->num_of_streams;
7838 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7839 // mm-qcamera-daemon only posts crop_data for streams
7840            // not linked to pproc, so the absence of valid crop
7841            // metadata is not necessarily an error case.
7842 LOGD("No valid crop metadata entries");
7843 } else {
7844 uint32_t reproc_stream_id;
7845 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7846 LOGD("No reprocessible stream found, ignore crop data");
7847 } else {
7848 int rc = NO_ERROR;
7849 Vector<int32_t> roi_map;
7850 int32_t *crop = new int32_t[cnt*4];
7851 if (NULL == crop) {
7852 rc = NO_MEMORY;
7853 }
7854 if (NO_ERROR == rc) {
7855 int32_t streams_found = 0;
7856 for (size_t i = 0; i < cnt; i++) {
7857 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7858 if (pprocDone) {
7859 // HAL already does internal reprocessing,
7860 // either via reprocessing before JPEG encoding,
7861 // or offline postprocessing for pproc bypass case.
7862 crop[0] = 0;
7863 crop[1] = 0;
7864 crop[2] = mInputStreamInfo.dim.width;
7865 crop[3] = mInputStreamInfo.dim.height;
7866 } else {
7867 crop[0] = crop_data->crop_info[i].crop.left;
7868 crop[1] = crop_data->crop_info[i].crop.top;
7869 crop[2] = crop_data->crop_info[i].crop.width;
7870 crop[3] = crop_data->crop_info[i].crop.height;
7871 }
7872 roi_map.add(crop_data->crop_info[i].roi_map.left);
7873 roi_map.add(crop_data->crop_info[i].roi_map.top);
7874 roi_map.add(crop_data->crop_info[i].roi_map.width);
7875 roi_map.add(crop_data->crop_info[i].roi_map.height);
7876 streams_found++;
7877 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7878 crop[0], crop[1], crop[2], crop[3]);
7879 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7880 crop_data->crop_info[i].roi_map.left,
7881 crop_data->crop_info[i].roi_map.top,
7882 crop_data->crop_info[i].roi_map.width,
7883 crop_data->crop_info[i].roi_map.height);
7884 break;
7885
7886 }
7887 }
7888 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7889 &streams_found, 1);
7890 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7891 crop, (size_t)(streams_found * 4));
7892 if (roi_map.array()) {
7893 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7894 roi_map.array(), roi_map.size());
7895 }
7896 }
7897 if (crop) {
7898 delete [] crop;
7899 }
7900 }
7901 }
7902 }
7903
7904 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7905        // Regardless of whether CAC is supported or not, CTS expects the CAC result to be
7906        // non-NULL, so hardcode the CAC result to OFF mode.
7907 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7908 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7909 } else {
7910 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7911 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7912 *cacMode);
7913 if (NAME_NOT_FOUND != val) {
7914 uint8_t resultCacMode = (uint8_t)val;
7915                // Check whether the CAC result from the callback equals the framework-set CAC mode.
7916                // If not, report the CAC mode that came in the corresponding request.
7917 if (fwk_cacMode != resultCacMode) {
7918 resultCacMode = fwk_cacMode;
7919 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007920 //Check if CAC is disabled by property
7921 if (m_cacModeDisabled) {
7922 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7923 }
7924
Thierry Strudel3d639192016-09-09 11:52:26 -07007925 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7926 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7927 } else {
7928 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7929 }
7930 }
7931 }
7932
7933 // Post blob of cam_cds_data through vendor tag.
7934 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7935 uint8_t cnt = cdsInfo->num_of_streams;
7936 cam_cds_data_t cdsDataOverride;
7937 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7938 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7939 cdsDataOverride.num_of_streams = 1;
7940 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7941 uint32_t reproc_stream_id;
7942 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7943 LOGD("No reprocessible stream found, ignore cds data");
7944 } else {
7945 for (size_t i = 0; i < cnt; i++) {
7946 if (cdsInfo->cds_info[i].stream_id ==
7947 reproc_stream_id) {
7948 cdsDataOverride.cds_info[0].cds_enable =
7949 cdsInfo->cds_info[i].cds_enable;
7950 break;
7951 }
7952 }
7953 }
7954 } else {
7955 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7956 }
7957 camMetadata.update(QCAMERA3_CDS_INFO,
7958 (uint8_t *)&cdsDataOverride,
7959 sizeof(cam_cds_data_t));
7960 }
7961
7962 // Ldaf calibration data
7963 if (!mLdafCalibExist) {
7964 IF_META_AVAILABLE(uint32_t, ldafCalib,
7965 CAM_INTF_META_LDAF_EXIF, metadata) {
7966 mLdafCalibExist = true;
7967 mLdafCalib[0] = ldafCalib[0];
7968 mLdafCalib[1] = ldafCalib[1];
7969 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7970 ldafCalib[0], ldafCalib[1]);
7971 }
7972 }
7973
Thierry Strudel54dc9782017-02-15 12:12:10 -08007974 // EXIF debug data through vendor tag
7975 /*
7976 * Mobicat Mask can assume 3 values:
7977 * 1 refers to Mobicat data,
7978 * 2 refers to Stats Debug and Exif Debug Data
7979 * 3 refers to Mobicat and Stats Debug Data
7980 * We want to make sure that we are sending Exif debug data
7981 * only when Mobicat Mask is 2.
7982 */
7983 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7984 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7985 (uint8_t *)(void *)mExifParams.debug_params,
7986 sizeof(mm_jpeg_debug_exif_params_t));
7987 }
7988
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007989 // Reprocess and DDM debug data through vendor tag
7990 cam_reprocess_info_t repro_info;
7991 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007992 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7993 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007994 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007995 }
7996 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7997 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007998 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007999 }
8000 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8001 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008002 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008003 }
8004 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8005 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008006 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008007 }
8008 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8009 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008010 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008011 }
8012 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008013 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008014 }
8015 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8016 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008017 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008018 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008019 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8020 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8021 }
8022 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8023 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8024 }
8025 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8026 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008027
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008028 // INSTANT AEC MODE
8029 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8030 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8031 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8032 }
8033
Shuzhen Wange763e802016-03-31 10:24:29 -07008034 // AF scene change
8035 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8036 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8037 }
8038
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008039 // Enable ZSL
8040 if (enableZsl != nullptr) {
8041 uint8_t value = *enableZsl ?
8042 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8043 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8044 }
8045
Xu Han821ea9c2017-05-23 09:00:40 -07008046 // OIS Data
8047 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8048 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8049 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8050 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8051 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8052 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8053 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8054 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8055 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8056 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8057 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8058 }
8059
Thierry Strudel3d639192016-09-09 11:52:26 -07008060 resultMetadata = camMetadata.release();
8061 return resultMetadata;
8062}
8063
8064/*===========================================================================
8065 * FUNCTION : saveExifParams
8066 *
8067 * DESCRIPTION: Cache 3A and statistics EXIF debug parameters from the metadata callback
8068 *
8069 * PARAMETERS :
8070 * @metadata : metadata information from callback
8071 *
8072 * RETURN : none
8073 *
8074 *==========================================================================*/
8075void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8076{
8077 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8078 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8079 if (mExifParams.debug_params) {
8080 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8081 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8082 }
8083 }
8084 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8085 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8086 if (mExifParams.debug_params) {
8087 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8088 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8089 }
8090 }
8091 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8092 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8093 if (mExifParams.debug_params) {
8094 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8095 mExifParams.debug_params->af_debug_params_valid = TRUE;
8096 }
8097 }
8098 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8099 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8100 if (mExifParams.debug_params) {
8101 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8102 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8103 }
8104 }
8105 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8106 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8107 if (mExifParams.debug_params) {
8108 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8109 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8110 }
8111 }
8112 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8113 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8114 if (mExifParams.debug_params) {
8115 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8116 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8117 }
8118 }
8119 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8120 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8121 if (mExifParams.debug_params) {
8122 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8123 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8124 }
8125 }
8126 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8127 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8128 if (mExifParams.debug_params) {
8129 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8130 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8131 }
8132 }
8133}
8134
8135/*===========================================================================
8136 * FUNCTION : get3AExifParams
8137 *
8138 * DESCRIPTION:
8139 * DESCRIPTION: Return the cached 3A EXIF parameters
8140 * PARAMETERS : none
8141 *
8142 *
8143 * RETURN : mm_jpeg_exif_params_t
8144 *
8145 *==========================================================================*/
8146mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8147{
8148 return mExifParams;
8149}
8150
8151/*===========================================================================
8152 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8153 *
8154 * DESCRIPTION:
8155 * DESCRIPTION: Translate urgent (partial) HAL metadata into framework result metadata
8156 * PARAMETERS :
8157 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008158 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8159 * urgent metadata in a batch. Always true for
8160 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008161 *
8162 * RETURN : camera_metadata_t*
8163 * metadata in a format specified by fwk
8164 *==========================================================================*/
8165camera_metadata_t*
8166QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008167 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008168{
8169 CameraMetadata camMetadata;
8170 camera_metadata_t *resultMetadata;
8171
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008172 if (!lastUrgentMetadataInBatch) {
8173 /* In batch mode, use empty metadata if this is not the last in batch
8174 */
8175 resultMetadata = allocate_camera_metadata(0, 0);
8176 return resultMetadata;
8177 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008178
8179 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8180 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8181 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8182 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8183 }
8184
8185 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8186 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8187 &aecTrigger->trigger, 1);
8188 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8189 &aecTrigger->trigger_id, 1);
8190 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8191 aecTrigger->trigger);
8192 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8193 aecTrigger->trigger_id);
8194 }
8195
8196 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8197 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8198 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8199 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8200 }
8201
Thierry Strudel3d639192016-09-09 11:52:26 -07008202 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8203 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8204 &af_trigger->trigger, 1);
8205 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8206 af_trigger->trigger);
8207 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8208 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8209 af_trigger->trigger_id);
8210 }
8211
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008212 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8213 /*af regions*/
8214 int32_t afRegions[REGIONS_TUPLE_COUNT];
8215 // Adjust crop region from sensor output coordinate system to active
8216 // array coordinate system.
8217 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8218 hAfRegions->rect.width, hAfRegions->rect.height);
8219
8220 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8221 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8222 REGIONS_TUPLE_COUNT);
8223 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8224 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8225 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8226 hAfRegions->rect.height);
8227 }
8228
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008229 // AF region confidence
8230 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8231 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8232 }
8233
Thierry Strudel3d639192016-09-09 11:52:26 -07008234 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8235 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8236 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8237 if (NAME_NOT_FOUND != val) {
8238 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8239 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8240 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8241 } else {
8242 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8243 }
8244 }
8245
8246 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8247 uint32_t aeMode = CAM_AE_MODE_MAX;
8248 int32_t flashMode = CAM_FLASH_MODE_MAX;
8249 int32_t redeye = -1;
8250 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8251 aeMode = *pAeMode;
8252 }
8253 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8254 flashMode = *pFlashMode;
8255 }
8256 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8257 redeye = *pRedeye;
8258 }
8259
8260 if (1 == redeye) {
8261 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8262 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8263 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8264 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8265 flashMode);
8266 if (NAME_NOT_FOUND != val) {
8267 fwk_aeMode = (uint8_t)val;
8268 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8269 } else {
8270 LOGE("Unsupported flash mode %d", flashMode);
8271 }
8272 } else if (aeMode == CAM_AE_MODE_ON) {
8273 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8274 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8275 } else if (aeMode == CAM_AE_MODE_OFF) {
8276 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8277 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008278 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8279 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8280 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008281 } else {
8282 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8283 "flashMode:%d, aeMode:%u!!!",
8284 redeye, flashMode, aeMode);
8285 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008286 if (mInstantAEC) {
8287        // Increment the frame index count until a bound is reached for instant AEC.
8288 mInstantAecFrameIdxCount++;
8289 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8290 CAM_INTF_META_AEC_INFO, metadata) {
8291 LOGH("ae_params->settled = %d",ae_params->settled);
8292 // If AEC settled, or if number of frames reached bound value,
8293            // If AEC has settled, or the number of frames has reached the
8294            // bound value, reset instant AEC.
8295 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8296 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8297 mInstantAEC = false;
8298 mResetInstantAEC = true;
8299 mInstantAecFrameIdxCount = 0;
8300 }
8301 }
8302 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008303 resultMetadata = camMetadata.release();
8304 return resultMetadata;
8305}
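/* Summary of how ANDROID_CONTROL_AE_MODE is derived above from the redeye,
 * flash-mode and AE-mode values reported by the HAL (highest priority first):
 *
 *     redeye == 1                              -> AE_MODE_ON_AUTO_FLASH_REDEYE
 *     flashMode == AUTO or ON                  -> mapped via AE_FLASH_MODE_MAP
 *     aeMode == CAM_AE_MODE_ON                 -> AE_MODE_ON
 *     aeMode == CAM_AE_MODE_OFF                -> AE_MODE_OFF
 *     aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH  -> NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH
 *     anything else                            -> error logged, tag left unset
 */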
8306
8307/*===========================================================================
8308 * FUNCTION : dumpMetadataToFile
8309 *
8310 * DESCRIPTION: Dumps tuning metadata to file system
8311 *
8312 * PARAMETERS :
8313 * @meta : tuning metadata
8314 * @dumpFrameCount : current dump frame count
8315 * @enabled : Enable mask
8316 *
8317 *==========================================================================*/
8318void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8319 uint32_t &dumpFrameCount,
8320 bool enabled,
8321 const char *type,
8322 uint32_t frameNumber)
8323{
8324 //Some sanity checks
8325 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8326 LOGE("Tuning sensor data size bigger than expected %d: %d",
8327 meta.tuning_sensor_data_size,
8328 TUNING_SENSOR_DATA_MAX);
8329 return;
8330 }
8331
8332 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8333 LOGE("Tuning VFE data size bigger than expected %d: %d",
8334 meta.tuning_vfe_data_size,
8335 TUNING_VFE_DATA_MAX);
8336 return;
8337 }
8338
8339 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8340 LOGE("Tuning CPP data size bigger than expected %d: %d",
8341 meta.tuning_cpp_data_size,
8342 TUNING_CPP_DATA_MAX);
8343 return;
8344 }
8345
8346 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8347 LOGE("Tuning CAC data size bigger than expected %d: %d",
8348 meta.tuning_cac_data_size,
8349 TUNING_CAC_DATA_MAX);
8350 return;
8351 }
8352 //
8353
8354 if(enabled){
8355 char timeBuf[FILENAME_MAX];
8356 char buf[FILENAME_MAX];
8357 memset(buf, 0, sizeof(buf));
8358 memset(timeBuf, 0, sizeof(timeBuf));
8359 time_t current_time;
8360 struct tm * timeinfo;
8361 time (&current_time);
8362 timeinfo = localtime (&current_time);
8363 if (timeinfo != NULL) {
8364 strftime (timeBuf, sizeof(timeBuf),
8365 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8366 }
8367 String8 filePath(timeBuf);
8368 snprintf(buf,
8369 sizeof(buf),
8370 "%dm_%s_%d.bin",
8371 dumpFrameCount,
8372 type,
8373 frameNumber);
8374 filePath.append(buf);
8375 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8376 if (file_fd >= 0) {
8377 ssize_t written_len = 0;
8378 meta.tuning_data_version = TUNING_DATA_VERSION;
8379 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8380 written_len += write(file_fd, data, sizeof(uint32_t));
8381 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8382 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8383 written_len += write(file_fd, data, sizeof(uint32_t));
8384 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8385 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8386 written_len += write(file_fd, data, sizeof(uint32_t));
8387 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8388 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8389 written_len += write(file_fd, data, sizeof(uint32_t));
8390 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8391 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8392 written_len += write(file_fd, data, sizeof(uint32_t));
8393 meta.tuning_mod3_data_size = 0;
8394 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8395 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8396 written_len += write(file_fd, data, sizeof(uint32_t));
8397 size_t total_size = meta.tuning_sensor_data_size;
8398 data = (void *)((uint8_t *)&meta.data);
8399 written_len += write(file_fd, data, total_size);
8400 total_size = meta.tuning_vfe_data_size;
8401 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8402 written_len += write(file_fd, data, total_size);
8403 total_size = meta.tuning_cpp_data_size;
8404 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8405 written_len += write(file_fd, data, total_size);
8406 total_size = meta.tuning_cac_data_size;
8407 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8408 written_len += write(file_fd, data, total_size);
8409 close(file_fd);
8410 }else {
8411 LOGE("fail to open file for metadata dumping");
8412            LOGE("failed to open file for metadata dumping");
8413 }
8414}
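/* Layout of the tuning dump file written above (each size field is a uint32_t,
 * data blocks follow back to back):
 *
 *     [tuning_data_version]
 *     [tuning_sensor_data_size][tuning_vfe_data_size][tuning_cpp_data_size]
 *     [tuning_cac_data_size][tuning_mod3_data_size (always written as 0 here)]
 *     [sensor data][VFE data][CPP data][CAC data]
 *
 * The file is created under QCAMERA_DUMP_FRM_LOCATION with a timestamp prefix
 * and named <dumpFrameCount>m_<type>_<frameNumber>.bin.
 */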
8415
8416/*===========================================================================
8417 * FUNCTION : cleanAndSortStreamInfo
8418 *
8419 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8420 * and sort them such that raw stream is at the end of the list
8421 * and sort them such that raw streams are at the end of the list.
8422 * This is a workaround for a camera daemon constraint.
8423 * PARAMETERS : None
8424 *
8425 *==========================================================================*/
8426void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8427{
8428 List<stream_info_t *> newStreamInfo;
8429
8430 /*clean up invalid streams*/
8431 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8432 it != mStreamInfo.end();) {
8433 if(((*it)->status) == INVALID){
8434 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8435 delete channel;
8436 free(*it);
8437 it = mStreamInfo.erase(it);
8438 } else {
8439 it++;
8440 }
8441 }
8442
8443 // Move preview/video/callback/snapshot streams into newList
8444 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8445 it != mStreamInfo.end();) {
8446 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8447 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8448 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8449 newStreamInfo.push_back(*it);
8450 it = mStreamInfo.erase(it);
8451 } else
8452 it++;
8453 }
8454 // Move raw streams into newList
8455 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8456 it != mStreamInfo.end();) {
8457 newStreamInfo.push_back(*it);
8458 it = mStreamInfo.erase(it);
8459 }
8460
8461 mStreamInfo = newStreamInfo;
8462}
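/* Illustrative example of the reordering above (hypothetical stream list):
 *
 *     before: [RAW16, PREVIEW(YUV), INVALID(callback), SNAPSHOT(JPEG)]
 *     after : [PREVIEW(YUV), SNAPSHOT(JPEG), RAW16]
 *
 * The INVALID entry is deleted and freed, and raw streams are moved to the end
 * of the list to satisfy the daemon constraint described above.
 */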
8463
8464/*===========================================================================
8465 * FUNCTION : extractJpegMetadata
8466 *
8467 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8468 * JPEG metadata is cached in HAL, and return as part of capture
8469 * result when metadata is returned from camera daemon.
8470 *
8471 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8472 * @request: capture request
8473 *
8474 *==========================================================================*/
8475void QCamera3HardwareInterface::extractJpegMetadata(
8476 CameraMetadata& jpegMetadata,
8477 const camera3_capture_request_t *request)
8478{
8479 CameraMetadata frame_settings;
8480 frame_settings = request->settings;
8481
8482 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8483 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8484 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8485 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8486
8487 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8488 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8489 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8490 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8491
8492 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8493 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8494 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8495 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8496
8497 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8498 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8499 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8500 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8501
8502 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8503 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8504 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8505 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8506
8507 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8508 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8509 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8510 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8511
8512 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8513 int32_t thumbnail_size[2];
8514 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8515 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8516 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8517 int32_t orientation =
8518 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008519 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008520 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8521 int32_t temp;
8522 temp = thumbnail_size[0];
8523 thumbnail_size[0] = thumbnail_size[1];
8524 thumbnail_size[1] = temp;
8525 }
8526 }
8527 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8528 thumbnail_size,
8529 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8530 }
8531
8532}
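/* Illustrative example of the thumbnail handling above (hypothetical request):
 * ANDROID_JPEG_THUMBNAIL_SIZE = [320, 240] with ANDROID_JPEG_ORIENTATION = 90
 * is cached as [240, 320] when needJpegExifRotation() is false, so the cached
 * thumbnail dimensions match the hardware-rotated main image.
 */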
8533
8534/*===========================================================================
8535 * FUNCTION : convertToRegions
8536 *
8537 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8538 *
8539 * PARAMETERS :
8540 * @rect : cam_rect_t struct to convert
8541 * @region : int32_t destination array
8542 * @weight : if we are converting from cam_area_t, weight is valid
8543 * else weight = -1
8544 *
8545 *==========================================================================*/
8546void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8547 int32_t *region, int weight)
8548{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008549 region[FACE_LEFT] = rect.left;
8550 region[FACE_TOP] = rect.top;
8551 region[FACE_RIGHT] = rect.left + rect.width;
8552 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008553 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008554 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008555 }
8556}
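/* Illustrative example (hypothetical values): rect {left=100, top=200,
 * width=300, height=400} with weight 1 is converted to
 * region[] = {100, 200, 400, 600, 1}, i.e. [left, top, right, bottom, weight].
 */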
8557
8558/*===========================================================================
8559 * FUNCTION : convertFromRegions
8560 *
8561 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
8562 *
8563 * PARAMETERS :
8564 *   @roi            : cam_area_t destination struct
8565 *   @frame_settings : capture request settings that contain the region tag
8566 *   @tag            : metadata tag whose data layout is
8567 *                     [x_min, y_min, x_max, y_max, weight]
8568 *
8569 *==========================================================================*/
8570void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008571 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008572{
Thierry Strudel3d639192016-09-09 11:52:26 -07008573 int32_t x_min = frame_settings.find(tag).data.i32[0];
8574 int32_t y_min = frame_settings.find(tag).data.i32[1];
8575 int32_t x_max = frame_settings.find(tag).data.i32[2];
8576 int32_t y_max = frame_settings.find(tag).data.i32[3];
8577 roi.weight = frame_settings.find(tag).data.i32[4];
8578 roi.rect.left = x_min;
8579 roi.rect.top = y_min;
8580 roi.rect.width = x_max - x_min;
8581 roi.rect.height = y_max - y_min;
8582}
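/* Illustrative example (hypothetical values): a request tag holding
 * {100, 200, 400, 600, 1} ([x_min, y_min, x_max, y_max, weight]) yields
 * roi.rect = {left=100, top=200, width=300, height=400} and roi.weight = 1,
 * i.e. the inverse of convertToRegions() above.
 */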
8583
8584/*===========================================================================
8585 * FUNCTION : resetIfNeededROI
8586 *
8587 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8588 * crop region
8589 *
8590 * PARAMETERS :
8591 * @roi : cam_area_t struct to resize
8592 * @scalerCropRegion : cam_crop_region_t region to compare against
8593 *
8594 *
8595 *==========================================================================*/
8596bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8597 const cam_crop_region_t* scalerCropRegion)
8598{
8599 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8600 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8601 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8602 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8603
8604    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8605     * Without this check, the validation below (whether the ROI lies inside the
8606     * scaler crop region) would fail, leaving the ROI un-reset and causing the
8607     * algorithm to keep using a stale ROI window.
8608 */
8609 if (roi->weight == 0) {
8610 return true;
8611 }
8612
8613 if ((roi_x_max < scalerCropRegion->left) ||
8614 // right edge of roi window is left of scalar crop's left edge
8615 (roi_y_max < scalerCropRegion->top) ||
8616 // bottom edge of roi window is above scalar crop's top edge
8617 (roi->rect.left > crop_x_max) ||
8618 // left edge of roi window is beyond(right) of scalar crop's right edge
8619 (roi->rect.top > crop_y_max)){
8620            // top edge of roi window is below scalar crop's bottom edge
8621 return false;
8622 }
8623 if (roi->rect.left < scalerCropRegion->left) {
8624 roi->rect.left = scalerCropRegion->left;
8625 }
8626 if (roi->rect.top < scalerCropRegion->top) {
8627 roi->rect.top = scalerCropRegion->top;
8628 }
8629 if (roi_x_max > crop_x_max) {
8630 roi_x_max = crop_x_max;
8631 }
8632 if (roi_y_max > crop_y_max) {
8633 roi_y_max = crop_y_max;
8634 }
8635 roi->rect.width = roi_x_max - roi->rect.left;
8636 roi->rect.height = roi_y_max - roi->rect.top;
8637 return true;
8638}
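/* Illustrative example (hypothetical values): with a scaler crop region of
 * {left=0, top=0, width=2000, height=1500} and an ROI of
 * {left=1800, top=1400, width=400, height=300, weight=1}, the ROI overlaps the
 * crop region, so it is clamped to {left=1800, top=1400, width=200, height=100}
 * and the function returns true. An ROI lying entirely outside the crop region
 * returns false, and an ROI with weight == 0 returns true immediately; neither
 * is modified.
 */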
8639
8640/*===========================================================================
8641 * FUNCTION : convertLandmarks
8642 *
8643 * DESCRIPTION: helper method to extract the landmarks from face detection info
8644 *
8645 * PARAMETERS :
8646 * @landmark_data : input landmark data to be converted
8647 * @landmarks : int32_t destination array
8648 *
8649 *
8650 *==========================================================================*/
8651void QCamera3HardwareInterface::convertLandmarks(
8652 cam_face_landmarks_info_t landmark_data,
8653 int32_t *landmarks)
8654{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008655 if (landmark_data.is_left_eye_valid) {
8656 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8657 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8658 } else {
8659 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8660 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8661 }
8662
8663 if (landmark_data.is_right_eye_valid) {
8664 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8665 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8666 } else {
8667 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8668 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8669 }
8670
8671 if (landmark_data.is_mouth_valid) {
8672 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8673 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8674 } else {
8675 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8676 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8677 }
8678}
8679
8680/*===========================================================================
8681 * FUNCTION : setInvalidLandmarks
8682 *
8683 * DESCRIPTION: helper method to set invalid landmarks
8684 *
8685 * PARAMETERS :
8686 * @landmarks : int32_t destination array
8687 *
8688 *
8689 *==========================================================================*/
8690void QCamera3HardwareInterface::setInvalidLandmarks(
8691 int32_t *landmarks)
8692{
8693 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8694 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8695 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8696 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8697 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8698 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008699}
8700
8701#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008702
8703/*===========================================================================
8704 * FUNCTION : getCapabilities
8705 *
8706 * DESCRIPTION: query camera capability from back-end
8707 *
8708 * PARAMETERS :
8709 * @ops : mm-interface ops structure
8710 * @cam_handle : camera handle for which we need capability
8711 *
8712 * RETURN : ptr type of capability structure
8713 * capability for success
8714 * NULL for failure
8715 *==========================================================================*/
8716cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8717 uint32_t cam_handle)
8718{
8719 int rc = NO_ERROR;
8720 QCamera3HeapMemory *capabilityHeap = NULL;
8721 cam_capability_t *cap_ptr = NULL;
8722
8723 if (ops == NULL) {
8724 LOGE("Invalid arguments");
8725 return NULL;
8726 }
8727
8728 capabilityHeap = new QCamera3HeapMemory(1);
8729 if (capabilityHeap == NULL) {
8730 LOGE("creation of capabilityHeap failed");
8731 return NULL;
8732 }
8733
8734 /* Allocate memory for capability buffer */
8735 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8736 if(rc != OK) {
8737        LOGE("No memory for capability");
8738 goto allocate_failed;
8739 }
8740
8741 /* Map memory for capability buffer */
8742 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8743
8744 rc = ops->map_buf(cam_handle,
8745 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8746 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8747 if(rc < 0) {
8748 LOGE("failed to map capability buffer");
8749 rc = FAILED_TRANSACTION;
8750 goto map_failed;
8751 }
8752
8753 /* Query Capability */
8754 rc = ops->query_capability(cam_handle);
8755 if(rc < 0) {
8756 LOGE("failed to query capability");
8757 rc = FAILED_TRANSACTION;
8758 goto query_failed;
8759 }
8760
8761 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8762 if (cap_ptr == NULL) {
8763 LOGE("out of memory");
8764 rc = NO_MEMORY;
8765 goto query_failed;
8766 }
8767
8768 memset(cap_ptr, 0, sizeof(cam_capability_t));
8769 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8770
8771 int index;
8772 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8773 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8774 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8775 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8776 }
8777
8778query_failed:
8779 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8780map_failed:
8781 capabilityHeap->deallocate();
8782allocate_failed:
8783 delete capabilityHeap;
8784
8785 if (rc != NO_ERROR) {
8786 return NULL;
8787 } else {
8788 return cap_ptr;
8789 }
8790}
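/* Flow of getCapabilities() above: allocate a single-block heap, map it to the
 * backend as CAM_MAPPING_BUF_TYPE_CAPABILITY, let query_capability() fill it,
 * copy the result into a malloc'ed cam_capability_t, then unmap and release the
 * heap. The goto labels unwind exactly the steps that succeeded, so a failure
 * at any stage still releases the earlier resources.
 */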
8791
Thierry Strudel3d639192016-09-09 11:52:26 -07008792/*===========================================================================
8793 * FUNCTION : initCapabilities
8794 *
8795 * DESCRIPTION: initialize camera capabilities in static data struct
8796 *
8797 * PARAMETERS :
8798 * @cameraId : camera Id
8799 *
8800 * RETURN : int32_t type of status
8801 * NO_ERROR -- success
8802 * none-zero failure code
8803 *==========================================================================*/
8804int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8805{
8806 int rc = 0;
8807 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008808 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008809
8810 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8811 if (rc) {
8812 LOGE("camera_open failed. rc = %d", rc);
8813 goto open_failed;
8814 }
8815 if (!cameraHandle) {
8816 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8817 goto open_failed;
8818 }
8819
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008820 handle = get_main_camera_handle(cameraHandle->camera_handle);
8821 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8822 if (gCamCapability[cameraId] == NULL) {
8823 rc = FAILED_TRANSACTION;
8824 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008825 }
8826
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008827 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008828 if (is_dual_camera_by_idx(cameraId)) {
8829 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8830 gCamCapability[cameraId]->aux_cam_cap =
8831 getCapabilities(cameraHandle->ops, handle);
8832 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8833 rc = FAILED_TRANSACTION;
8834 free(gCamCapability[cameraId]);
8835 goto failed_op;
8836 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008837
8838 // Copy the main camera capability to main_cam_cap struct
8839 gCamCapability[cameraId]->main_cam_cap =
8840 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8841 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8842 LOGE("out of memory");
8843 rc = NO_MEMORY;
8844 goto failed_op;
8845 }
8846 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8847 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008848 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008849failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008850 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8851 cameraHandle = NULL;
8852open_failed:
8853 return rc;
8854}
8855
8856/*==========================================================================
8857 * FUNCTION : get3Aversion
8858 * FUNCTION : get3AVersion
8859 * DESCRIPTION: get the Q3A S/W version
8860 *
8861 * PARAMETERS :
8862 * @sw_version: Reference of Q3A structure which will hold version info upon
8863 * return
8864 *
8865 * RETURN : None
8866 *
8867 *==========================================================================*/
8868void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8869{
8870 if(gCamCapability[mCameraId])
8871 sw_version = gCamCapability[mCameraId]->q3a_version;
8872 else
8873 LOGE("Capability structure NULL!");
8874}
8875
8876
8877/*===========================================================================
8878 * FUNCTION : initParameters
8879 *
8880 * DESCRIPTION: initialize camera parameters
8881 *
8882 * PARAMETERS :
8883 *
8884 * RETURN : int32_t type of status
8885 * NO_ERROR -- success
8886 * none-zero failure code
8887 *==========================================================================*/
8888int QCamera3HardwareInterface::initParameters()
8889{
8890 int rc = 0;
8891
8892 //Allocate Set Param Buffer
8893 mParamHeap = new QCamera3HeapMemory(1);
8894 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8895 if(rc != OK) {
8896 rc = NO_MEMORY;
8897 LOGE("Failed to allocate SETPARM Heap memory");
8898 delete mParamHeap;
8899 mParamHeap = NULL;
8900 return rc;
8901 }
8902
8903 //Map memory for parameters buffer
8904 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8905 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8906 mParamHeap->getFd(0),
8907 sizeof(metadata_buffer_t),
8908 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8909 if(rc < 0) {
8910 LOGE("failed to map SETPARM buffer");
8911 rc = FAILED_TRANSACTION;
8912 mParamHeap->deallocate();
8913 delete mParamHeap;
8914 mParamHeap = NULL;
8915 return rc;
8916 }
8917
8918 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8919
8920 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8921 return rc;
8922}
8923
8924/*===========================================================================
8925 * FUNCTION : deinitParameters
8926 *
8927 * DESCRIPTION: de-initialize camera parameters
8928 *
8929 * PARAMETERS :
8930 *
8931 * RETURN : NONE
8932 *==========================================================================*/
8933void QCamera3HardwareInterface::deinitParameters()
8934{
8935 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8936 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8937
8938 mParamHeap->deallocate();
8939 delete mParamHeap;
8940 mParamHeap = NULL;
8941
8942 mParameters = NULL;
8943
8944 free(mPrevParameters);
8945 mPrevParameters = NULL;
8946}
8947
8948/*===========================================================================
8949 * FUNCTION : calcMaxJpegSize
8950 *
8951 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8952 *
8953 * PARAMETERS :
8954 *   @camera_id : camera Id
8955 * RETURN : max_jpeg_size
8956 *==========================================================================*/
8957size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8958{
8959 size_t max_jpeg_size = 0;
8960 size_t temp_width, temp_height;
8961 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8962 MAX_SIZES_CNT);
8963 for (size_t i = 0; i < count; i++) {
8964 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8965 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8966 if (temp_width * temp_height > max_jpeg_size ) {
8967 max_jpeg_size = temp_width * temp_height;
8968 }
8969 }
8970 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8971 return max_jpeg_size;
8972}
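/* Worked example (hypothetical sensor): for a largest picture size of
 * 4000x3000, max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t),
 * i.e. 18,000,000 bytes plus the blob header -- the worst-case YUV420 frame
 * size is used as the upper bound for the JPEG buffer.
 */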
8973
8974/*===========================================================================
8975 * FUNCTION : getMaxRawSize
8976 *
8977 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8978 *
8979 * PARAMETERS :
8980 *   @camera_id : camera Id
8981 * RETURN : Largest supported Raw Dimension
8982 *==========================================================================*/
8983cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8984{
8985 int max_width = 0;
8986 cam_dimension_t maxRawSize;
8987
8988 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8989 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8990 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8991 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8992 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8993 }
8994 }
8995 return maxRawSize;
8996}
8997
8998
8999/*===========================================================================
9000 * FUNCTION : calcMaxJpegDim
9001 *
9002 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9003 *
9004 * PARAMETERS :
9005 *
9006 * RETURN : max_jpeg_dim
9007 *==========================================================================*/
9008cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9009{
9010 cam_dimension_t max_jpeg_dim;
9011 cam_dimension_t curr_jpeg_dim;
9012 max_jpeg_dim.width = 0;
9013 max_jpeg_dim.height = 0;
9014 curr_jpeg_dim.width = 0;
9015 curr_jpeg_dim.height = 0;
9016 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9017 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9018 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9019 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9020 max_jpeg_dim.width * max_jpeg_dim.height ) {
9021 max_jpeg_dim.width = curr_jpeg_dim.width;
9022 max_jpeg_dim.height = curr_jpeg_dim.height;
9023 }
9024 }
9025 return max_jpeg_dim;
9026}
9027
9028/*===========================================================================
9029 * FUNCTION : addStreamConfig
9030 *
9031 * DESCRIPTION: adds the stream configuration to the array
9032 *
9033 * PARAMETERS :
9034 * @available_stream_configs : pointer to stream configuration array
9035 * @scalar_format : scalar format
9036 * @dim : configuration dimension
9037 * @config_type : input or output configuration type
9038 *
9039 * RETURN : NONE
9040 *==========================================================================*/
9041void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9042 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9043{
9044 available_stream_configs.add(scalar_format);
9045 available_stream_configs.add(dim.width);
9046 available_stream_configs.add(dim.height);
9047 available_stream_configs.add(config_type);
9048}
9049
9050/*===========================================================================
9051 * FUNCTION : suppportBurstCapture
9052 * FUNCTION : supportBurstCapture
9053 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9054 *
9055 * PARAMETERS :
9056 * @cameraId : camera Id
9057 *
9058 * RETURN : true if camera supports BURST_CAPTURE
9059 * false otherwise
9060 *==========================================================================*/
9061bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9062{
9063 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9064 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9065 const int32_t highResWidth = 3264;
9066 const int32_t highResHeight = 2448;
9067
9068 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9069 // Maximum resolution images cannot be captured at >= 10fps
9070 // -> not supporting BURST_CAPTURE
9071 return false;
9072 }
9073
9074 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9075 // Maximum resolution images can be captured at >= 20fps
9076 // --> supporting BURST_CAPTURE
9077 return true;
9078 }
9079
9080 // Find the smallest highRes resolution, or largest resolution if there is none
9081 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9082 MAX_SIZES_CNT);
9083 size_t highRes = 0;
9084 while ((highRes + 1 < totalCnt) &&
9085 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9086 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9087 highResWidth * highResHeight)) {
9088 highRes++;
9089 }
9090 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9091 return true;
9092 } else {
9093 return false;
9094 }
9095}
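/* Summary of the thresholds used above: BURST_CAPTURE is advertised when the
 * full-resolution minimum frame duration is <= 50 ms (>= 20 fps), denied when
 * it exceeds 100 ms (< 10 fps), and otherwise decided by whether the smallest
 * "high resolution" size (>= 3264x2448, ~8 MP) can be captured at >= 20 fps.
 */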
9096
9097/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009098 * FUNCTION : getPDStatIndex
9099 *
9100 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9101 *
9102 * PARAMETERS :
9103 * @caps : camera capabilities
9104 *
9105 * RETURN : int32_t type
9106 * non-negative - on success
9107 * -1 - on failure
9108 *==========================================================================*/
9109int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9110 if (nullptr == caps) {
9111 return -1;
9112 }
9113
9114 uint32_t metaRawCount = caps->meta_raw_channel_count;
9115 int32_t ret = -1;
9116 for (size_t i = 0; i < metaRawCount; i++) {
9117 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9118 ret = i;
9119 break;
9120 }
9121 }
9122
9123 return ret;
9124}
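// Illustrative example (hypothetical capability contents): with
// meta_raw_channel_count == 2 and only the second sub_fmt[] entry equal to
// CAM_FORMAT_SUBTYPE_PDAF_STATS, the function above returns 1; if no entry
// matches, it returns -1.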
9125
9126/*===========================================================================
9127 * FUNCTION : initStaticMetadata
9128 *
9129 * DESCRIPTION: initialize the static metadata
9130 *
9131 * PARAMETERS :
9132 * @cameraId : camera Id
9133 *
9134 * RETURN : int32_t type of status
9135 * 0 -- success
9136 * non-zero failure code
9137 *==========================================================================*/
9138int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9139{
9140 int rc = 0;
9141 CameraMetadata staticInfo;
9142 size_t count = 0;
9143 bool limitedDevice = false;
9144 char prop[PROPERTY_VALUE_MAX];
9145 bool supportBurst = false;
9146
9147 supportBurst = supportBurstCapture(cameraId);
9148
9149 /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9150 * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
9151 * advertised as a LIMITED device */
9152 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9153 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9154 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9155 !supportBurst;
9156
9157 uint8_t supportedHwLvl = limitedDevice ?
9158 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9159#ifndef USE_HAL_3_3
9160 // LEVEL_3 - This device will support level 3.
9161 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9162#else
9163 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9164#endif
9165
9166 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9167 &supportedHwLvl, 1);
9168
9169 bool facingBack = false;
9170 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9171 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9172 facingBack = true;
9173 }
9174 /*HAL 3 only*/
9175 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9176 &gCamCapability[cameraId]->min_focus_distance, 1);
9177
9178 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9179 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9180
9181 /*should be using focal lengths but sensor doesn't provide that info now*/
9182 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9183 &gCamCapability[cameraId]->focal_length,
9184 1);
9185
9186 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9187 gCamCapability[cameraId]->apertures,
9188 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9189
9190 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9191 gCamCapability[cameraId]->filter_densities,
9192 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9193
9194
9195 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9196 size_t mode_count =
9197 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9198 for (size_t i = 0; i < mode_count; i++) {
9199 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9200 }
9201 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9202 available_opt_stab_modes, mode_count);
9203
9204 int32_t lens_shading_map_size[] = {
9205 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9206 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9207 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9208 lens_shading_map_size,
9209 sizeof(lens_shading_map_size)/sizeof(int32_t));
9210
9211 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9212 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9213
9214 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9215 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9216
9217 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9218 &gCamCapability[cameraId]->max_frame_duration, 1);
9219
9220 camera_metadata_rational baseGainFactor = {
9221 gCamCapability[cameraId]->base_gain_factor.numerator,
9222 gCamCapability[cameraId]->base_gain_factor.denominator};
9223 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9224 &baseGainFactor, 1);
9225
9226 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9227 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9228
9229 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9230 gCamCapability[cameraId]->pixel_array_size.height};
9231 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9232 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9233
9234 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9235 gCamCapability[cameraId]->active_array_size.top,
9236 gCamCapability[cameraId]->active_array_size.width,
9237 gCamCapability[cameraId]->active_array_size.height};
9238 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9239 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9240
9241 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9242 &gCamCapability[cameraId]->white_level, 1);
9243
9244 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9245 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9246 gCamCapability[cameraId]->color_arrangement);
9247 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9248 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9249
9250#ifndef USE_HAL_3_3
9251 bool hasBlackRegions = false;
9252 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9253 LOGW("black_region_count: %d is bounded to %d",
9254 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9255 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9256 }
9257 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9258 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9259 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9260 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9261 }
9262 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9263 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9264 hasBlackRegions = true;
9265 }
9266#endif
9267 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9268 &gCamCapability[cameraId]->flash_charge_duration, 1);
9269
9270 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9271 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9272
9273 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9274 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9275 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9276 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9277 &timestampSource, 1);
9278
9279 //update histogram vendor data
9280 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9281 &gCamCapability[cameraId]->histogram_size, 1);
9282
9283 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9284 &gCamCapability[cameraId]->max_histogram_count, 1);
9285
9286 //Set the supported bins to {max_bins, max_bins/2, max_bins/4, ...}
9287 //so that the app can request fewer bins than the maximum supported.
9288 std::vector<int32_t> histBins;
9289 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9290 histBins.push_back(maxHistBins);
9291 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9292 (maxHistBins & 0x1) == 0) {
9293 histBins.push_back(maxHistBins >> 1);
9294 maxHistBins >>= 1;
9295 }
9296 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9297 histBins.data(), histBins.size());
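    // Example of the halving sequence above, assuming hypothetical values
    // max_histogram_count == 256 and MIN_CAM_HISTOGRAM_STATS_SIZE == 32:
    // histBins is populated as {256, 128, 64, 32}.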
9298
9299 int32_t sharpness_map_size[] = {
9300 gCamCapability[cameraId]->sharpness_map_size.width,
9301 gCamCapability[cameraId]->sharpness_map_size.height};
9302
9303 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9304 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9305
9306 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9307 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9308
9309 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9310 if (0 <= indexPD) {
9311 // Advertise PD stats data as part of the Depth capabilities
9312 int32_t depthWidth =
9313 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9314 int32_t depthHeight =
9315 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9316 int32_t depthStride =
9317 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
9318 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9319 assert(0 < depthSamplesCount);
9320 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9321 &depthSamplesCount, 1);
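    // Arithmetic sketch for the sample count above (hypothetical PD stats
    // dimensions): for a 64x736 meta raw buffer,
    //   depthSamplesCount = (64 * 736 * 2) / 16 = 5888
    // i.e. the 2-byte-per-pixel buffer size divided into 16-byte samples.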
9322
9323 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9324 depthHeight,
9325 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9326 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9327 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9328 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9329 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9330
9331 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9332 depthHeight, 33333333,
9333 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9334 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9335 depthMinDuration,
9336 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9337
9338 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9339 depthHeight, 0,
9340 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9341 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9342 depthStallDuration,
9343 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9344
9345 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9346 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9347
9348 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9349 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9350 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
9351 }
9352
9353 int32_t scalar_formats[] = {
9354 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9355 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9356 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9357 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9358 HAL_PIXEL_FORMAT_RAW10,
9359 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
9360 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9361 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9362 scalar_formats_count);
9363
9364 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9365 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9366 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9367 count, MAX_SIZES_CNT, available_processed_sizes);
9368 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9369 available_processed_sizes, count * 2);
9370
9371 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9372 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9373 makeTable(gCamCapability[cameraId]->raw_dim,
9374 count, MAX_SIZES_CNT, available_raw_sizes);
9375 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9376 available_raw_sizes, count * 2);
9377
9378 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9379 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9380 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9381 count, MAX_SIZES_CNT, available_fps_ranges);
9382 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9383 available_fps_ranges, count * 2);
9384
9385 camera_metadata_rational exposureCompensationStep = {
9386 gCamCapability[cameraId]->exp_compensation_step.numerator,
9387 gCamCapability[cameraId]->exp_compensation_step.denominator};
9388 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9389 &exposureCompensationStep, 1);
9390
9391 Vector<uint8_t> availableVstabModes;
9392 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9393 char eis_prop[PROPERTY_VALUE_MAX];
9394 bool eisSupported = false;
9395 memset(eis_prop, 0, sizeof(eis_prop));
9396 property_get("persist.camera.eis.enable", eis_prop, "1");
9397 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
9398 count = IS_TYPE_MAX;
9399 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9400 for (size_t i = 0; i < count; i++) {
9401 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9402 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9403 eisSupported = true;
9404 break;
9405 }
9406 }
9407 if (facingBack && eis_prop_set && eisSupported) {
9408 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9409 }
9410 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9411 availableVstabModes.array(), availableVstabModes.size());
9412
9413 /*HAL 1 and HAL 3 common*/
9414 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9415 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9416 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9417 // Cap the max zoom to the max preferred value
9418 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
9419 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9420 &maxZoom, 1);
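    // Worked example with hypothetical zoom table values: if the last entry of
    // zoom_ratio_tbl is 600 and minZoomStep is 100, maxZoomStep/minZoomStep
    // yields 6, so a 6x maximum digital zoom is advertised unless it exceeds
    // MAX_PREFERRED_ZOOM_RATIO, in which case the preferred cap is reported.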
9421
9422 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9423 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9424
9425 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9426 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9427 max3aRegions[2] = 0; /* AF not supported */
9428 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9429 max3aRegions, 3);
9430
9431 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9432 memset(prop, 0, sizeof(prop));
9433 property_get("persist.camera.facedetect", prop, "1");
9434 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9435 LOGD("Support face detection mode: %d",
9436 supportedFaceDetectMode);
9437
9438 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9439 /* supported face detect mode should be OFF if the max number of faces is 0 */
9440 if (maxFaces <= 0) {
9441 supportedFaceDetectMode = 0;
9442 }
9443 Vector<uint8_t> availableFaceDetectModes;
9444 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9445 if (supportedFaceDetectMode == 1) {
9446 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9447 } else if (supportedFaceDetectMode == 2) {
9448 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9449 } else if (supportedFaceDetectMode == 3) {
9450 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9451 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9452 } else {
9453 maxFaces = 0;
9454 }
9455 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9456 availableFaceDetectModes.array(),
9457 availableFaceDetectModes.size());
9458 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9459 (int32_t *)&maxFaces, 1);
9460 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9461 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9462 &face_bsgc, 1);
9463
9464 int32_t exposureCompensationRange[] = {
9465 gCamCapability[cameraId]->exposure_compensation_min,
9466 gCamCapability[cameraId]->exposure_compensation_max};
9467 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9468 exposureCompensationRange,
9469 sizeof(exposureCompensationRange)/sizeof(int32_t));
9470
9471 uint8_t lensFacing = (facingBack) ?
9472 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9473 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9474
9475 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9476 available_thumbnail_sizes,
9477 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9478
9479 /*all sizes will be clubbed into this tag*/
9480 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9481 /*android.scaler.availableStreamConfigurations*/
9482 Vector<int32_t> available_stream_configs;
9483 cam_dimension_t active_array_dim;
9484 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9485 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
9486
9487 /*Advertise the list of supported input dimensions based on the property below.
9488 By default all sizes up to 5MP will be advertised.
9489 Note that the setprop resolution format should be WxH,
9490 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9491 To list all supported sizes, the property needs to be set to "0x0" */
9492 cam_dimension_t minInputSize = {2592,1944}; //5MP
9493 memset(prop, 0, sizeof(prop));
9494 property_get("persist.camera.input.minsize", prop, "2592x1944");
9495 if (strlen(prop) > 0) {
9496 char *saveptr = NULL;
9497 char *token = strtok_r(prop, "x", &saveptr);
9498 if (token != NULL) {
9499 minInputSize.width = atoi(token);
9500 }
9501 token = strtok_r(NULL, "x", &saveptr);
9502 if (token != NULL) {
9503 minInputSize.height = atoi(token);
9504 }
9505 }
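    // Parsing sketch (hypothetical property values): "1280x720" yields
    // minInputSize = {1280, 720}, so only picture sizes of at least that width
    // or height are advertised as input streams below; "0x0" effectively lists
    // every supported size.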
9506
9507 /* Add input/output stream configurations for each scalar format */
9508 for (size_t j = 0; j < scalar_formats_count; j++) {
9509 switch (scalar_formats[j]) {
9510 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9511 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9512 case HAL_PIXEL_FORMAT_RAW10:
9513 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9514 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9515 addStreamConfig(available_stream_configs, scalar_formats[j],
9516 gCamCapability[cameraId]->raw_dim[i],
9517 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9518 }
9519 break;
9520 case HAL_PIXEL_FORMAT_BLOB:
9521 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9522 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9523 addStreamConfig(available_stream_configs, scalar_formats[j],
9524 gCamCapability[cameraId]->picture_sizes_tbl[i],
9525 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9526 }
9527 break;
9528 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9529 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9530 default:
9531 cam_dimension_t largest_picture_size;
9532 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9533 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9534 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9535 addStreamConfig(available_stream_configs, scalar_formats[j],
9536 gCamCapability[cameraId]->picture_sizes_tbl[i],
9537 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9538 /*For the below 2 formats we also support input streams for reprocessing; advertise those*/
9539 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9540 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
9541 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9542 >= minInputSize.width) || (gCamCapability[cameraId]->
9543 picture_sizes_tbl[i].height >= minInputSize.height)) {
9544 addStreamConfig(available_stream_configs, scalar_formats[j],
9545 gCamCapability[cameraId]->picture_sizes_tbl[i],
9546 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9547 }
9548 }
9549 }
9550
9551 break;
9552 }
9553 }
9554
9555 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9556 available_stream_configs.array(), available_stream_configs.size());
9557 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9558 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9559
9560 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9561 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9562
9563 /* android.scaler.availableMinFrameDurations */
9564 Vector<int64_t> available_min_durations;
9565 for (size_t j = 0; j < scalar_formats_count; j++) {
9566 switch (scalar_formats[j]) {
9567 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9568 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9569 case HAL_PIXEL_FORMAT_RAW10:
9570 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9571 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9572 available_min_durations.add(scalar_formats[j]);
9573 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9574 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9575 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9576 }
9577 break;
9578 default:
9579 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9580 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9581 available_min_durations.add(scalar_formats[j]);
9582 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9583 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9584 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9585 }
9586 break;
9587 }
9588 }
9589 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9590 available_min_durations.array(), available_min_durations.size());
9591
9592 Vector<int32_t> available_hfr_configs;
9593 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9594 int32_t fps = 0;
9595 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9596 case CAM_HFR_MODE_60FPS:
9597 fps = 60;
9598 break;
9599 case CAM_HFR_MODE_90FPS:
9600 fps = 90;
9601 break;
9602 case CAM_HFR_MODE_120FPS:
9603 fps = 120;
9604 break;
9605 case CAM_HFR_MODE_150FPS:
9606 fps = 150;
9607 break;
9608 case CAM_HFR_MODE_180FPS:
9609 fps = 180;
9610 break;
9611 case CAM_HFR_MODE_210FPS:
9612 fps = 210;
9613 break;
9614 case CAM_HFR_MODE_240FPS:
9615 fps = 240;
9616 break;
9617 case CAM_HFR_MODE_480FPS:
9618 fps = 480;
9619 break;
9620 case CAM_HFR_MODE_OFF:
9621 case CAM_HFR_MODE_MAX:
9622 default:
9623 break;
9624 }
9625
9626 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9627 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9628 /* For each HFR frame rate, need to advertise one variable fps range
9629 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9630 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9631 * set by the app. When video recording is started, [120, 120] is
9632 * set. This way sensor configuration does not change when recording
9633 * is started */
9634
9635 /* (width, height, fps_min, fps_max, batch_size_max) */
9636 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9637 j < MAX_SIZES_CNT; j++) {
9638 available_hfr_configs.add(
9639 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9640 available_hfr_configs.add(
9641 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9642 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9643 available_hfr_configs.add(fps);
9644 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9645
9646 /* (width, height, fps_min, fps_max, batch_size_max) */
9647 available_hfr_configs.add(
9648 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9649 available_hfr_configs.add(
9650 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9651 available_hfr_configs.add(fps);
9652 available_hfr_configs.add(fps);
9653 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9654 }
9655 }
9656 }
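    // Example of the resulting tuples (hypothetical 1920x1080 @ 120 fps entry,
    // assuming PREVIEW_FPS_FOR_HFR == 30): the loop above would append
    //   (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4),
    // one variable-rate and one fixed-rate range, with batch size fps/30.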
9657 //Advertise HFR capability only if the property is set
9658 memset(prop, 0, sizeof(prop));
9659 property_get("persist.camera.hal3hfr.enable", prop, "1");
9660 uint8_t hfrEnable = (uint8_t)atoi(prop);
9661
9662 if(hfrEnable && available_hfr_configs.array()) {
9663 staticInfo.update(
9664 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9665 available_hfr_configs.array(), available_hfr_configs.size());
9666 }
9667
9668 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9669 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9670 &max_jpeg_size, 1);
9671
9672 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9673 size_t size = 0;
9674 count = CAM_EFFECT_MODE_MAX;
9675 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9676 for (size_t i = 0; i < count; i++) {
9677 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9678 gCamCapability[cameraId]->supported_effects[i]);
9679 if (NAME_NOT_FOUND != val) {
9680 avail_effects[size] = (uint8_t)val;
9681 size++;
9682 }
9683 }
9684 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9685 avail_effects,
9686 size);
9687
9688 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9689 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9690 size_t supported_scene_modes_cnt = 0;
9691 count = CAM_SCENE_MODE_MAX;
9692 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9693 for (size_t i = 0; i < count; i++) {
9694 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9695 CAM_SCENE_MODE_OFF) {
9696 int val = lookupFwkName(SCENE_MODES_MAP,
9697 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9698 gCamCapability[cameraId]->supported_scene_modes[i]);
9699
9700 if (NAME_NOT_FOUND != val) {
9701 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9702 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9703 supported_scene_modes_cnt++;
9704 }
9705 }
9706 }
9707 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9708 avail_scene_modes,
9709 supported_scene_modes_cnt);
9710
9711 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9712 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9713 supported_scene_modes_cnt,
9714 CAM_SCENE_MODE_MAX,
9715 scene_mode_overrides,
9716 supported_indexes,
9717 cameraId);
9718
9719 if (supported_scene_modes_cnt == 0) {
9720 supported_scene_modes_cnt = 1;
9721 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9722 }
9723
9724 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9725 scene_mode_overrides, supported_scene_modes_cnt * 3);
9726
9727 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9728 ANDROID_CONTROL_MODE_AUTO,
9729 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9730 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9731 available_control_modes,
9732 3);
9733
9734 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9735 size = 0;
9736 count = CAM_ANTIBANDING_MODE_MAX;
9737 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9738 for (size_t i = 0; i < count; i++) {
9739 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9740 gCamCapability[cameraId]->supported_antibandings[i]);
9741 if (NAME_NOT_FOUND != val) {
9742 avail_antibanding_modes[size] = (uint8_t)val;
9743 size++;
9744 }
9745
9746 }
9747 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9748 avail_antibanding_modes,
9749 size);
9750
9751 uint8_t avail_abberation_modes[] = {
9752 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9753 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9754 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9755 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9756 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9757 if (0 == count) {
9758 // If no aberration correction modes are available for a device, only advertise the OFF mode
9759 size = 1;
9760 } else {
9761 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9762 // so advertise all 3 modes if at least any one mode is supported, as per the
9763 // new M requirement
9764 size = 3;
9765 }
9766 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9767 avail_abberation_modes,
9768 size);
9769
9770 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9771 size = 0;
9772 count = CAM_FOCUS_MODE_MAX;
9773 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9774 for (size_t i = 0; i < count; i++) {
9775 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9776 gCamCapability[cameraId]->supported_focus_modes[i]);
9777 if (NAME_NOT_FOUND != val) {
9778 avail_af_modes[size] = (uint8_t)val;
9779 size++;
9780 }
9781 }
9782 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9783 avail_af_modes,
9784 size);
9785
9786 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9787 size = 0;
9788 count = CAM_WB_MODE_MAX;
9789 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9790 for (size_t i = 0; i < count; i++) {
9791 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9792 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9793 gCamCapability[cameraId]->supported_white_balances[i]);
9794 if (NAME_NOT_FOUND != val) {
9795 avail_awb_modes[size] = (uint8_t)val;
9796 size++;
9797 }
9798 }
9799 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9800 avail_awb_modes,
9801 size);
9802
9803 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9804 count = CAM_FLASH_FIRING_LEVEL_MAX;
9805 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9806 count);
9807 for (size_t i = 0; i < count; i++) {
9808 available_flash_levels[i] =
9809 gCamCapability[cameraId]->supported_firing_levels[i];
9810 }
9811 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9812 available_flash_levels, count);
9813
9814 uint8_t flashAvailable;
9815 if (gCamCapability[cameraId]->flash_available)
9816 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9817 else
9818 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9819 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9820 &flashAvailable, 1);
9821
9822 Vector<uint8_t> avail_ae_modes;
9823 count = CAM_AE_MODE_MAX;
9824 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9825 for (size_t i = 0; i < count; i++) {
9826 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9827 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9828 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9829 }
9830 avail_ae_modes.add(aeMode);
9831 }
9832 if (flashAvailable) {
9833 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9834 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9835 }
9836 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9837 avail_ae_modes.array(),
9838 avail_ae_modes.size());
9839
9840 int32_t sensitivity_range[2];
9841 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9842 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9843 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9844 sensitivity_range,
9845 sizeof(sensitivity_range) / sizeof(int32_t));
9846
9847 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9848 &gCamCapability[cameraId]->max_analog_sensitivity,
9849 1);
9850
9851 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9852 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9853 &sensor_orientation,
9854 1);
9855
9856 int32_t max_output_streams[] = {
9857 MAX_STALLING_STREAMS,
9858 MAX_PROCESSED_STREAMS,
9859 MAX_RAW_STREAMS};
9860 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9861 max_output_streams,
9862 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9863
9864 uint8_t avail_leds = 0;
9865 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9866 &avail_leds, 0);
9867
9868 uint8_t focus_dist_calibrated;
9869 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9870 gCamCapability[cameraId]->focus_dist_calibrated);
9871 if (NAME_NOT_FOUND != val) {
9872 focus_dist_calibrated = (uint8_t)val;
9873 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9874 &focus_dist_calibrated, 1);
9875 }
9876
9877 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9878 size = 0;
9879 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9880 MAX_TEST_PATTERN_CNT);
9881 for (size_t i = 0; i < count; i++) {
9882 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9883 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9884 if (NAME_NOT_FOUND != testpatternMode) {
9885 avail_testpattern_modes[size] = testpatternMode;
9886 size++;
9887 }
9888 }
9889 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9890 avail_testpattern_modes,
9891 size);
9892
9893 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9894 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9895 &max_pipeline_depth,
9896 1);
9897
9898 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9899 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9900 &partial_result_count,
9901 1);
9902
9903 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9904 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9905
9906 Vector<uint8_t> available_capabilities;
9907 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9908 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9909 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9910 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9911 if (supportBurst) {
9912 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9913 }
9914 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9915 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9916 if (hfrEnable && available_hfr_configs.array()) {
9917 available_capabilities.add(
9918 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9919 }
9920
9921 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9922 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9923 }
9924 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9925 available_capabilities.array(),
9926 available_capabilities.size());
9927
9928 //aeLockAvailable is to be set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9929 //The assumption is that all bayer cameras support MANUAL_SENSOR.
9930 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9931 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9932
9933 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9934 &aeLockAvailable, 1);
9935
9936 //awbLockAvailable is to be set to true if the capabilities include MANUAL_POST_PROCESSING or
9937 //BURST_CAPTURE. The assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9938 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9939 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9940
9941 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9942 &awbLockAvailable, 1);
9943
9944 int32_t max_input_streams = 1;
9945 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9946 &max_input_streams,
9947 1);
9948
9949 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9950 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9951 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9952 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9953 HAL_PIXEL_FORMAT_YCbCr_420_888};
9954 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9955 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
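    // Reading of the map above: an IMPLEMENTATION_DEFINED input reprocesses to
    // { BLOB, YCbCr_420_888 } outputs, and a YCbCr_420_888 input reprocesses to
    // { BLOB, YCbCr_420_888 } outputs, following the
    // "input format, num_output_formats, output formats..." layout noted above.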
9956
9957 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9958 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9959 &max_latency,
9960 1);
9961
9962#ifndef USE_HAL_3_3
9963 int32_t isp_sensitivity_range[2];
9964 isp_sensitivity_range[0] =
9965 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9966 isp_sensitivity_range[1] =
9967 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9968 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9969 isp_sensitivity_range,
9970 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9971#endif
9972
9973 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9974 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9975 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9976 available_hot_pixel_modes,
9977 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9978
9979 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9980 ANDROID_SHADING_MODE_FAST,
9981 ANDROID_SHADING_MODE_HIGH_QUALITY};
9982 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9983 available_shading_modes,
9984 3);
9985
9986 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9987 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9988 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9989 available_lens_shading_map_modes,
9990 2);
9991
9992 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9993 ANDROID_EDGE_MODE_FAST,
9994 ANDROID_EDGE_MODE_HIGH_QUALITY,
9995 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9996 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9997 available_edge_modes,
9998 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9999
10000 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10001 ANDROID_NOISE_REDUCTION_MODE_FAST,
10002 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10003 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10004 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10005 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10006 available_noise_red_modes,
10007 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10008
10009 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10010 ANDROID_TONEMAP_MODE_FAST,
10011 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10012 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10013 available_tonemap_modes,
10014 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10015
10016 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10017 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10018 available_hot_pixel_map_modes,
10019 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10020
10021 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10022 gCamCapability[cameraId]->reference_illuminant1);
10023 if (NAME_NOT_FOUND != val) {
10024 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10025 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10026 }
10027
10028 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10029 gCamCapability[cameraId]->reference_illuminant2);
10030 if (NAME_NOT_FOUND != val) {
10031 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10032 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10033 }
10034
10035 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10036 (void *)gCamCapability[cameraId]->forward_matrix1,
10037 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10038
10039 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10040 (void *)gCamCapability[cameraId]->forward_matrix2,
10041 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10042
10043 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10044 (void *)gCamCapability[cameraId]->color_transform1,
10045 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10046
10047 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10048 (void *)gCamCapability[cameraId]->color_transform2,
10049 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10050
10051 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10052 (void *)gCamCapability[cameraId]->calibration_transform1,
10053 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10054
10055 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10056 (void *)gCamCapability[cameraId]->calibration_transform2,
10057 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10058
10059 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10060 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10061 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10062 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10063 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10064 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10065 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10066 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10067 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10068 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10069 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10070 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10071 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10072 ANDROID_JPEG_GPS_COORDINATES,
10073 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10074 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10075 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10076 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10077 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10078 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10079 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10080 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10081 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10082 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
10083#ifndef USE_HAL_3_3
10084 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10085#endif
10086 ANDROID_STATISTICS_FACE_DETECT_MODE,
10087 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10088 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10089 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10090 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
10091 /* DevCamDebug metadata request_keys_basic */
10092 DEVCAMDEBUG_META_ENABLE,
10093 /* DevCamDebug metadata end */
10094 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10095 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10096 TANGO_MODE_DATA_SENSOR_FULLFOV,
10097 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
10098 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
10099 };
10100
10101 size_t request_keys_cnt =
10102 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10103 Vector<int32_t> available_request_keys;
10104 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10105 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10106 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10107 }
10108
10109 if (gExposeEnableZslKey) {
10110 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10111 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10112 }
10113 }
10114
10115 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10116 available_request_keys.array(), available_request_keys.size());
10117
10118 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10119 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10120 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10121 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10122 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10123 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10124 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10125 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10126 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10127 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10128 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10129 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10130 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10131 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10132 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10133 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10134 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10135 ANDROID_STATISTICS_FACE_DETECT_MODE,
10136 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10137 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10138 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
10139 ANDROID_STATISTICS_FACE_SCORES,
10140#ifndef USE_HAL_3_3
10141 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10142#endif
10143 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
10144 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
10145 // DevCamDebug metadata result_keys_basic
10146 DEVCAMDEBUG_META_ENABLE,
10147 // DevCamDebug metadata result_keys AF
10148 DEVCAMDEBUG_AF_LENS_POSITION,
10149 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10150 DEVCAMDEBUG_AF_TOF_DISTANCE,
10151 DEVCAMDEBUG_AF_LUMA,
10152 DEVCAMDEBUG_AF_HAF_STATE,
10153 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10154 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10155 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10156 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10157 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10158 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10159 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10160 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10161 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10162 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10163 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10164 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10165 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10166 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10167 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10168 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10169 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10170 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10171 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10172 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10173 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10174 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10175 // DevCamDebug metadata result_keys AEC
10176 DEVCAMDEBUG_AEC_TARGET_LUMA,
10177 DEVCAMDEBUG_AEC_COMP_LUMA,
10178 DEVCAMDEBUG_AEC_AVG_LUMA,
10179 DEVCAMDEBUG_AEC_CUR_LUMA,
10180 DEVCAMDEBUG_AEC_LINECOUNT,
10181 DEVCAMDEBUG_AEC_REAL_GAIN,
10182 DEVCAMDEBUG_AEC_EXP_INDEX,
10183 DEVCAMDEBUG_AEC_LUX_IDX,
10184 // DevCamDebug metadata result_keys zzHDR
10185 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10186 DEVCAMDEBUG_AEC_L_LINECOUNT,
10187 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10188 DEVCAMDEBUG_AEC_S_LINECOUNT,
10189 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10190 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10191 // DevCamDebug metadata result_keys ADRC
10192 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10193 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10194 DEVCAMDEBUG_AEC_GTM_RATIO,
10195 DEVCAMDEBUG_AEC_LTM_RATIO,
10196 DEVCAMDEBUG_AEC_LA_RATIO,
10197 DEVCAMDEBUG_AEC_GAMMA_RATIO,
10198 // DevCamDebug metadata result_keys AWB
10199 DEVCAMDEBUG_AWB_R_GAIN,
10200 DEVCAMDEBUG_AWB_G_GAIN,
10201 DEVCAMDEBUG_AWB_B_GAIN,
10202 DEVCAMDEBUG_AWB_CCT,
10203 DEVCAMDEBUG_AWB_DECISION,
10204 /* DevCamDebug metadata end */
10205 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10206 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10207 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
10208 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
10209 };
10210
10211 size_t result_keys_cnt =
10212 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10213
10214 Vector<int32_t> available_result_keys;
10215 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10216 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10217 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10218 }
10219 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10220 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10221 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10222 }
10223 if (supportedFaceDetectMode == 1) {
10224 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10225 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10226 } else if ((supportedFaceDetectMode == 2) ||
10227 (supportedFaceDetectMode == 3)) {
10228 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10229 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10230 }
10231#ifndef USE_HAL_3_3
10232 if (hasBlackRegions) {
10233 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10234 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10235 }
10236#endif
10237
10238 if (gExposeEnableZslKey) {
10239 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10240 }
10241
10242 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10243 available_result_keys.array(), available_result_keys.size());
10244
10245 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
10246 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10247 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10248 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10249 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10250 ANDROID_SCALER_CROPPING_TYPE,
10251 ANDROID_SYNC_MAX_LATENCY,
10252 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10253 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10254 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10255 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10256 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10257 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10258 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10259 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10260 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10261 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10262 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10263 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10264 ANDROID_LENS_FACING,
10265 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10266 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10267 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10268 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10269 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10270 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10271 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10272 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10273 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10274 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10275 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10276 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10277 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10278 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10279 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10280 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10281 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10282 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10283 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10284 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
10285 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
10286 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10287 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10288 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10289 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10290 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10291 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10292 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10293 ANDROID_CONTROL_AVAILABLE_MODES,
10294 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10295 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10296 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10297 ANDROID_SHADING_AVAILABLE_MODES,
10298 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10299#ifndef USE_HAL_3_3
10300 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10301 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10302#endif
10303 };
10304
10305 Vector<int32_t> available_characteristics_keys;
10306 available_characteristics_keys.appendArray(characteristics_keys_basic,
10307 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10308#ifndef USE_HAL_3_3
10309 if (hasBlackRegions) {
10310 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10311 }
10312#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010313
10314 if (0 <= indexPD) {
10315 int32_t depthKeys[] = {
10316 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10317 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10318 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10319 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10320 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10321 };
10322 available_characteristics_keys.appendArray(depthKeys,
10323 sizeof(depthKeys) / sizeof(depthKeys[0]));
10324 }
10325
Thierry Strudel3d639192016-09-09 11:52:26 -070010326 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010327 available_characteristics_keys.array(),
10328 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010329
10330 /*available stall durations depend on the hw + sw and will be different for different devices */
10331 /*have to add for raw after implementation*/
10332 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10333 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10334
10335 Vector<int64_t> available_stall_durations;
10336 for (uint32_t j = 0; j < stall_formats_count; j++) {
10337 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10338 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10339 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10340 available_stall_durations.add(stall_formats[j]);
10341 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10342 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10343 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10344 }
10345 } else {
10346 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10347 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10348 available_stall_durations.add(stall_formats[j]);
10349 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10350 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10351 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10352 }
10353 }
10354 }
10355 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10356 available_stall_durations.array(),
10357 available_stall_durations.size());
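// Illustrative note (not part of the original HAL code): each entry appended above is a
// 4-tuple of (format, width, height, stall duration in ns). For example, a hypothetical
// 4032x3024 JPEG size with a 300 ms stall would contribute
// { HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 300000000LL }.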
10358
10359 //QCAMERA3_OPAQUE_RAW
10360 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10361 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10362 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10363 case LEGACY_RAW:
10364 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10365 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10366 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10367 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10368 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10369 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10370 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10371 break;
10372 case MIPI_RAW:
10373 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10374 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10375 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10376 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10377 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10378 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10379 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10380 break;
10381 default:
10382 LOGE("unknown opaque_raw_format %d",
10383 gCamCapability[cameraId]->opaque_raw_fmt);
10384 break;
10385 }
10386 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10387
10388 Vector<int32_t> strides;
10389 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10390 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10391 cam_stream_buf_plane_info_t buf_planes;
10392 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10393 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10394 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10395 &gCamCapability[cameraId]->padding_info, &buf_planes);
10396 strides.add(buf_planes.plane_info.mp[0].stride);
10397 }
10398 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10399 strides.size());
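// Illustrative note (not part of the original HAL code): QCAMERA3_OPAQUE_RAW_STRIDES is a
// list of (width, height, stride) triplets, one per supported RAW dimension, with the stride
// taken from the plane info computed by mm_stream_calc_offset_raw(). As a rough, hypothetical
// example, a 4208-pixel-wide MIPI-packed 10bpp row occupies 4208 * 10 / 8 = 5260 bytes before
// any platform padding is applied.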
10400
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010401 //TBD: remove the following line once backend advertises zzHDR in feature mask
10402 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010403 //Video HDR default
10404 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10405 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010406 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010407 int32_t vhdr_mode[] = {
10408 QCAMERA3_VIDEO_HDR_MODE_OFF,
10409 QCAMERA3_VIDEO_HDR_MODE_ON};
10410
10411 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10412 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10413 vhdr_mode, vhdr_mode_count);
10414 }
10415
Thierry Strudel3d639192016-09-09 11:52:26 -070010416 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10417 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10418 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10419
10420 uint8_t isMonoOnly =
10421 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10422 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10423 &isMonoOnly, 1);
10424
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010425#ifndef USE_HAL_3_3
10426 Vector<int32_t> opaque_size;
10427 for (size_t j = 0; j < scalar_formats_count; j++) {
10428 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10429 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10430 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10431 cam_stream_buf_plane_info_t buf_planes;
10432
10433 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10434 &gCamCapability[cameraId]->padding_info, &buf_planes);
10435
10436 if (rc == 0) {
10437 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10438 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10439 opaque_size.add(buf_planes.plane_info.frame_len);
10440 } else {
10441 LOGE("raw frame calculation failed!");
10442 }
10443 }
10444 }
10445 }
10446
10447 if ((opaque_size.size() > 0) &&
10448 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10449 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10450 else
10451 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using a rough estimation (2 bytes/pixel)");
10452#endif
10453
Thierry Strudel04e026f2016-10-10 11:27:36 -070010454 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10455 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10456 size = 0;
10457 count = CAM_IR_MODE_MAX;
10458 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10459 for (size_t i = 0; i < count; i++) {
10460 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10461 gCamCapability[cameraId]->supported_ir_modes[i]);
10462 if (NAME_NOT_FOUND != val) {
10463 avail_ir_modes[size] = (int32_t)val;
10464 size++;
10465 }
10466 }
10467 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10468 avail_ir_modes, size);
10469 }
10470
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010471 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10472 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10473 size = 0;
10474 count = CAM_AEC_CONVERGENCE_MAX;
10475 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10476 for (size_t i = 0; i < count; i++) {
10477 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10478 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10479 if (NAME_NOT_FOUND != val) {
10480 available_instant_aec_modes[size] = (int32_t)val;
10481 size++;
10482 }
10483 }
10484 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10485 available_instant_aec_modes, size);
10486 }
10487
Thierry Strudel54dc9782017-02-15 12:12:10 -080010488 int32_t sharpness_range[] = {
10489 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10490 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10491 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10492
10493 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10494 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10495 size = 0;
10496 count = CAM_BINNING_CORRECTION_MODE_MAX;
10497 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10498 for (size_t i = 0; i < count; i++) {
10499 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10500 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10501 gCamCapability[cameraId]->supported_binning_modes[i]);
10502 if (NAME_NOT_FOUND != val) {
10503 avail_binning_modes[size] = (int32_t)val;
10504 size++;
10505 }
10506 }
10507 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10508 avail_binning_modes, size);
10509 }
10510
10511 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10512 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10513 size = 0;
10514 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10515 for (size_t i = 0; i < count; i++) {
10516 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10517 gCamCapability[cameraId]->supported_aec_modes[i]);
10518 if (NAME_NOT_FOUND != val)
10519 available_aec_modes[size++] = val;
10520 }
10521 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10522 available_aec_modes, size);
10523 }
10524
10525 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10526 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10527 size = 0;
10528 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10529 for (size_t i = 0; i < count; i++) {
10530 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10531 gCamCapability[cameraId]->supported_iso_modes[i]);
10532 if (NAME_NOT_FOUND != val)
10533 available_iso_modes[size++] = val;
10534 }
10535 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10536 available_iso_modes, size);
10537 }
10538
10539 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010540 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010541 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10542 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10543 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10544
10545 int32_t available_saturation_range[4];
10546 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10547 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10548 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10549 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10550 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10551 available_saturation_range, 4);
10552
10553 uint8_t is_hdr_values[2];
10554 is_hdr_values[0] = 0;
10555 is_hdr_values[1] = 1;
10556 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10557 is_hdr_values, 2);
10558
10559 float is_hdr_confidence_range[2];
10560 is_hdr_confidence_range[0] = 0.0;
10561 is_hdr_confidence_range[1] = 1.0;
10562 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10563 is_hdr_confidence_range, 2);
10564
Emilian Peev0a972ef2017-03-16 10:25:53 +000010565 size_t eepromLength = strnlen(
10566 reinterpret_cast<const char *>(
10567 gCamCapability[cameraId]->eeprom_version_info),
10568 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10569 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010570 char easelInfo[] = ",E:N";
10571 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10572 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10573 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010574 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10575 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010576 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010577 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010578 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10579 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10580 }
10581
Thierry Strudel3d639192016-09-09 11:52:26 -070010582 gStaticMetadata[cameraId] = staticInfo.release();
10583 return rc;
10584}
10585
10586/*===========================================================================
10587 * FUNCTION : makeTable
10588 *
10589 * DESCRIPTION: make a table of sizes
10590 *
10591 * PARAMETERS :
10592 *   @dimTable : source table of cam_dimension_t entries; @size : number of valid entries
10593 *   @max_size : capacity of @sizeTable in (width, height) pairs; @sizeTable : output array of flattened width/height values
10594 *==========================================================================*/
10595void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10596 size_t max_size, int32_t *sizeTable)
10597{
10598 size_t j = 0;
10599 if (size > max_size) {
10600 size = max_size;
10601 }
10602 for (size_t i = 0; i < size; i++) {
10603 sizeTable[j] = dimTable[i].width;
10604 sizeTable[j+1] = dimTable[i].height;
10605 j+=2;
10606 }
10607}
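/* Illustrative usage of makeTable() (not part of the original HAL code, values hypothetical):
 *   cam_dimension_t dims[] = {{4032, 3024}, {1920, 1080}};
 *   int32_t table[4];
 *   makeTable(dims, 2, 2, table);   // table = {4032, 3024, 1920, 1080}
 */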
10608
10609/*===========================================================================
10610 * FUNCTION : makeFPSTable
10611 *
10612 * DESCRIPTION: make a table of fps ranges
10613 *
10614 * PARAMETERS :
10615 *   @fpsTable / @size : source fps ranges and valid entry count; @max_size / @fpsRangesTable : output capacity (in pairs) and flattened (min_fps, max_fps) output array
10616 *==========================================================================*/
10617void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10618 size_t max_size, int32_t *fpsRangesTable)
10619{
10620 size_t j = 0;
10621 if (size > max_size) {
10622 size = max_size;
10623 }
10624 for (size_t i = 0; i < size; i++) {
10625 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10626 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10627 j+=2;
10628 }
10629}
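/* Illustrative usage of makeFPSTable() (not part of the original HAL code, values hypothetical):
 *   cam_fps_range_t fps[2] = {};
 *   fps[0].min_fps = 15.0; fps[0].max_fps = 30.0;
 *   fps[1].min_fps = 30.0; fps[1].max_fps = 30.0;
 *   int32_t table[4];
 *   makeFPSTable(fps, 2, 2, table);   // table = {15, 30, 30, 30}
 */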
10630
10631/*===========================================================================
10632 * FUNCTION : makeOverridesList
10633 *
10634 * DESCRIPTION: make a list of scene mode overrides
10635 *
10636 * PARAMETERS :
10637 *   @overridesTable / @size : per-scene-mode overrides from the backend and valid entry count
10638 *   @max_size : output capacity; @overridesList : output (ae, awb, af) triplets; @supported_indexes : backend indexes of the framework-supported scene modes; @camera_id : camera Id
10639 *==========================================================================*/
10640void QCamera3HardwareInterface::makeOverridesList(
10641 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10642 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10643{
10644 /*daemon will give a list of overrides for all scene modes.
10645 However, we should send the fwk only the overrides for the scene modes
10646 supported by the framework*/
10647 size_t j = 0;
10648 if (size > max_size) {
10649 size = max_size;
10650 }
10651 size_t focus_count = CAM_FOCUS_MODE_MAX;
10652 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10653 focus_count);
10654 for (size_t i = 0; i < size; i++) {
10655 bool supt = false;
10656 size_t index = supported_indexes[i];
10657 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10658 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10659 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10660 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10661 overridesTable[index].awb_mode);
10662 if (NAME_NOT_FOUND != val) {
10663 overridesList[j+1] = (uint8_t)val;
10664 }
10665 uint8_t focus_override = overridesTable[index].af_mode;
10666 for (size_t k = 0; k < focus_count; k++) {
10667 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10668 supt = true;
10669 break;
10670 }
10671 }
10672 if (supt) {
10673 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10674 focus_override);
10675 if (NAME_NOT_FOUND != val) {
10676 overridesList[j+2] = (uint8_t)val;
10677 }
10678 } else {
10679 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10680 }
10681 j+=3;
10682 }
10683}
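/* Illustrative note (not part of the original HAL code): overridesList ends up as a flat array
 * of (ae_mode, awb_mode, af_mode) triplets, one per framework-visible scene mode, which is the
 * layout ANDROID_CONTROL_SCENE_MODE_OVERRIDES expects. For example, a flash-capable camera whose
 * backend requests auto AWB and continuous-picture AF for a scene mode would contribute
 * { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, ANDROID_CONTROL_AWB_MODE_AUTO,
 *   ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE }. */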
10684
10685/*===========================================================================
10686 * FUNCTION : filterJpegSizes
10687 *
10688 * DESCRIPTION: Returns the supported JPEG sizes, keeping only those processed
10689 * sizes that are no smaller than the active array scaled down by downscale_factor
10690 *
10691 * PARAMETERS :
10692 *   @jpegSizes / @processedSizes : output and input arrays of (width, height) pairs; @processedSizesCnt : number of entries in processedSizes; @maxCount : maximum number of entries to consider; @active_array_size : sensor active array; @downscale_factor : maximum allowed downscale
10693 * RETURN : length of jpegSizes array
10694 *==========================================================================*/
10695
10696size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10697 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10698 uint8_t downscale_factor)
10699{
10700 if (0 == downscale_factor) {
10701 downscale_factor = 1;
10702 }
10703
10704 int32_t min_width = active_array_size.width / downscale_factor;
10705 int32_t min_height = active_array_size.height / downscale_factor;
10706 size_t jpegSizesCnt = 0;
10707 if (processedSizesCnt > maxCount) {
10708 processedSizesCnt = maxCount;
10709 }
10710 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10711 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10712 jpegSizes[jpegSizesCnt] = processedSizes[i];
10713 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10714 jpegSizesCnt += 2;
10715 }
10716 }
10717 return jpegSizesCnt;
10718}
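/* Worked example for filterJpegSizes() (not part of the original HAL code, values hypothetical):
 * with a 4032x3024 active array and downscale_factor = 4, the minimum accepted size is 1008x756.
 * A processed size of 1920x1080 is kept (1920 >= 1008 and 1080 >= 756), while 1280x720 is
 * dropped because 720 < 756. */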
10719
10720/*===========================================================================
10721 * FUNCTION : computeNoiseModelEntryS
10722 *
10723 * DESCRIPTION: function to map a given sensitivity to the S noise
10724 * model parameters in the DNG noise model.
10725 *
10726 * PARAMETERS : sens : the sensor sensitivity
10727 *
10728 * RETURN : S (sensor amplification) noise
10729 *
10730 *==========================================================================*/
10731double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10732 double s = gCamCapability[mCameraId]->gradient_S * sens +
10733 gCamCapability[mCameraId]->offset_S;
10734 return ((s < 0.0) ? 0.0 : s);
10735}
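/* Worked example (not part of the original HAL code, coefficients hypothetical): with
 * gradient_S = 4.0e-06 and offset_S = 1.0e-05, a sensitivity of 400 maps to
 * S = 4.0e-06 * 400 + 1.0e-05 = 1.61e-03. The resulting (S, O) pair is what feeds
 * ANDROID_SENSOR_NOISE_PROFILE, where the DNG noise model is evaluated as
 * noise(x) = sqrt(S * x + O). */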
10736
10737/*===========================================================================
10738 * FUNCTION : computeNoiseModelEntryO
10739 *
10740 * DESCRIPTION: function to map a given sensitivity to the O noise
10741 * model parameters in the DNG noise model.
10742 *
10743 * PARAMETERS : sens : the sensor sensitivity
10744 *
10745 * RETURN : O (sensor readout) noise
10746 *
10747 *==========================================================================*/
10748double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10749 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10750 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10751 1.0 : (1.0 * sens / max_analog_sens);
10752 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10753 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10754 return ((o < 0.0) ? 0.0 : o);
10755}
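/* Worked example (not part of the original HAL code, coefficients hypothetical): with
 * gradient_O = 4.0e-11, offset_O = 1.0e-06 and max_analog_sensitivity = 800, a sensitivity of
 * 1600 gives digital_gain = 1600 / 800 = 2, so
 * O = 4.0e-11 * 1600^2 + 1.0e-06 * 2^2 = 1.024e-04 + 4.0e-06 ≈ 1.064e-04. */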
10756
10757/*===========================================================================
10758 * FUNCTION : getSensorSensitivity
10759 *
10760 * DESCRIPTION: convert iso_mode to an integer value
10761 *
10762 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10763 *
10764 * RETURN : sensitivity supported by sensor
10765 *
10766 *==========================================================================*/
10767int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10768{
10769 int32_t sensitivity;
10770
10771 switch (iso_mode) {
10772 case CAM_ISO_MODE_100:
10773 sensitivity = 100;
10774 break;
10775 case CAM_ISO_MODE_200:
10776 sensitivity = 200;
10777 break;
10778 case CAM_ISO_MODE_400:
10779 sensitivity = 400;
10780 break;
10781 case CAM_ISO_MODE_800:
10782 sensitivity = 800;
10783 break;
10784 case CAM_ISO_MODE_1600:
10785 sensitivity = 1600;
10786 break;
10787 default:
10788 sensitivity = -1;
10789 break;
10790 }
10791 return sensitivity;
10792}
10793
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010794int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010795 if (gEaselManagerClient == nullptr) {
10796 gEaselManagerClient = EaselManagerClient::create();
10797 if (gEaselManagerClient == nullptr) {
10798 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10799 return -ENODEV;
10800 }
10801 }
10802
10803 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010804 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10805 // to connect to Easel.
10806 bool doNotpowerOnEasel =
10807 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10808
10809 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010810 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10811 return OK;
10812 }
10813
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010814 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010815 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010816 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010817 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010818 return res;
10819 }
10820
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010821 EaselManagerClientOpened = true;
10822
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010823 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010824 if (res != OK) {
10825 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10826 }
10827
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010828 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010829 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010830
10831 // Expose enableZsl key only when HDR+ mode is enabled.
10832 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010833 }
10834
10835 return OK;
10836}
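/* Usage note (not part of the original HAL code): the behavior above is driven by system
 * properties that can be toggled from a shell during bring-up, for example:
 *   adb shell setprop persist.camera.hdrplus.enable 1     # leave Easel bypass-only mode
 *   adb shell setprop persist.camera.hdrplus.profiling 1  # enable HDR+ profiling
 *   adb shell setprop camera.hdrplus.donotpoweroneasel 1  # keep Easel unpowered for HDR+ tests
 * The values are picked up the next time initHdrPlusClientLocked() runs before Easel has been
 * opened. */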
10837
Thierry Strudel3d639192016-09-09 11:52:26 -070010838/*===========================================================================
10839 * FUNCTION : getCamInfo
10840 *
10841 * DESCRIPTION: query camera capabilities
10842 *
10843 * PARAMETERS :
10844 * @cameraId : camera Id
10845 * @info : camera info struct to be filled in with camera capabilities
10846 *
10847 * RETURN : int type of status
10848 * NO_ERROR -- success
10849 * non-zero failure code
10850 *==========================================================================*/
10851int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10852 struct camera_info *info)
10853{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010854 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010855 int rc = 0;
10856
10857 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010858
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010859 {
10860 Mutex::Autolock l(gHdrPlusClientLock);
10861 rc = initHdrPlusClientLocked();
10862 if (rc != OK) {
10863 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10864 pthread_mutex_unlock(&gCamLock);
10865 return rc;
10866 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010867 }
10868
Thierry Strudel3d639192016-09-09 11:52:26 -070010869 if (NULL == gCamCapability[cameraId]) {
10870 rc = initCapabilities(cameraId);
10871 if (rc < 0) {
10872 pthread_mutex_unlock(&gCamLock);
10873 return rc;
10874 }
10875 }
10876
10877 if (NULL == gStaticMetadata[cameraId]) {
10878 rc = initStaticMetadata(cameraId);
10879 if (rc < 0) {
10880 pthread_mutex_unlock(&gCamLock);
10881 return rc;
10882 }
10883 }
10884
10885 switch(gCamCapability[cameraId]->position) {
10886 case CAM_POSITION_BACK:
10887 case CAM_POSITION_BACK_AUX:
10888 info->facing = CAMERA_FACING_BACK;
10889 break;
10890
10891 case CAM_POSITION_FRONT:
10892 case CAM_POSITION_FRONT_AUX:
10893 info->facing = CAMERA_FACING_FRONT;
10894 break;
10895
10896 default:
10897 LOGE("Unknown position type %d for camera id:%d",
10898 gCamCapability[cameraId]->position, cameraId);
10899 rc = -1;
10900 break;
10901 }
10902
10903
10904 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010905#ifndef USE_HAL_3_3
10906 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10907#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010908 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010909#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010910 info->static_camera_characteristics = gStaticMetadata[cameraId];
10911
10912 //For now assume both cameras can operate independently.
10913 info->conflicting_devices = NULL;
10914 info->conflicting_devices_length = 0;
10915
10916 //resource cost is 100 * MIN(1.0, m/M),
10917 //where m is throughput requirement with maximum stream configuration
10918 //and M is CPP maximum throughput.
10919 float max_fps = 0.0;
10920 for (uint32_t i = 0;
10921 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10922 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10923 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10924 }
10925 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10926 gCamCapability[cameraId]->active_array_size.width *
10927 gCamCapability[cameraId]->active_array_size.height * max_fps /
10928 gCamCapability[cameraId]->max_pixel_bandwidth;
10929 info->resource_cost = 100 * MIN(1.0, ratio);
10930 LOGI("camera %d resource cost is %d", cameraId,
10931 info->resource_cost);
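// Worked example (not part of the original HAL code, values hypothetical): with
// MAX_PROCESSED_STREAMS = 2, a 4032x3024 active array and max_fps = 30,
// m = 2 * 4032 * 3024 * 30 ≈ 7.3e8 pixels/s; if max_pixel_bandwidth were 1.2e9,
// the reported cost would be 100 * MIN(1.0, 0.61) ≈ 61.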
10932
10933 pthread_mutex_unlock(&gCamLock);
10934 return rc;
10935}
10936
10937/*===========================================================================
10938 * FUNCTION : translateCapabilityToMetadata
10939 *
10940 * DESCRIPTION: translate the capability into camera_metadata_t
10941 *
10942 * PARAMETERS : type of the request
10943 *
10944 *
10945 * RETURN : success: camera_metadata_t*
10946 * failure: NULL
10947 *
10948 *==========================================================================*/
10949camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10950{
10951 if (mDefaultMetadata[type] != NULL) {
10952 return mDefaultMetadata[type];
10953 }
10954 //first time we are handling this request
10955 //fill up the metadata structure using the wrapper class
10956 CameraMetadata settings;
10957 //translate from cam_capability_t to camera_metadata_tag_t
10958 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10959 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10960 int32_t defaultRequestID = 0;
10961 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10962
10963 /* OIS disable */
10964 char ois_prop[PROPERTY_VALUE_MAX];
10965 memset(ois_prop, 0, sizeof(ois_prop));
10966 property_get("persist.camera.ois.disable", ois_prop, "0");
10967 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10968
10969 /* Force video to use OIS */
10970 char videoOisProp[PROPERTY_VALUE_MAX];
10971 memset(videoOisProp, 0, sizeof(videoOisProp));
10972 property_get("persist.camera.ois.video", videoOisProp, "1");
10973 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010974
10975 // Hybrid AE enable/disable
10976 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10977 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10978 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10979 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10980
Thierry Strudel3d639192016-09-09 11:52:26 -070010981 uint8_t controlIntent = 0;
10982 uint8_t focusMode;
10983 uint8_t vsMode;
10984 uint8_t optStabMode;
10985 uint8_t cacMode;
10986 uint8_t edge_mode;
10987 uint8_t noise_red_mode;
10988 uint8_t tonemap_mode;
10989 bool highQualityModeEntryAvailable = FALSE;
10990 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010991 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010992 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10993 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010994 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010995 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010996 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010997
Thierry Strudel3d639192016-09-09 11:52:26 -070010998 switch (type) {
10999 case CAMERA3_TEMPLATE_PREVIEW:
11000 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11001 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11002 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11003 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11004 edge_mode = ANDROID_EDGE_MODE_FAST;
11005 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11006 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11007 break;
11008 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11009 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11010 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11011 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11012 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11013 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11014 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11015 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11016 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11017 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11018 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11019 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11020 highQualityModeEntryAvailable = TRUE;
11021 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11022 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11023 fastModeEntryAvailable = TRUE;
11024 }
11025 }
11026 if (highQualityModeEntryAvailable) {
11027 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11028 } else if (fastModeEntryAvailable) {
11029 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11030 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011031 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11032 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11033 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011034 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011035 break;
11036 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11037 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11038 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11039 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011040 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11041 edge_mode = ANDROID_EDGE_MODE_FAST;
11042 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11043 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11044 if (forceVideoOis)
11045 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11046 break;
11047 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11048 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11049 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11050 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011051 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11052 edge_mode = ANDROID_EDGE_MODE_FAST;
11053 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11054 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11055 if (forceVideoOis)
11056 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11057 break;
11058 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11059 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11060 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11061 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11062 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11063 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11064 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11065 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11066 break;
11067 case CAMERA3_TEMPLATE_MANUAL:
11068 edge_mode = ANDROID_EDGE_MODE_FAST;
11069 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11070 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11071 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11072 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11073 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11074 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11075 break;
11076 default:
11077 edge_mode = ANDROID_EDGE_MODE_FAST;
11078 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11079 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11080 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11081 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11082 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11083 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11084 break;
11085 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011086 // Set CAC to OFF if underlying device doesn't support
11087 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11088 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11089 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011090 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11091 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11092 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11093 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11094 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11095 }
11096 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011097 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011098 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011099
11100 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11101 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11102 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11103 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11104 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11105 || ois_disable)
11106 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11107 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011108 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011109
11110 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11111 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11112
11113 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11114 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11115
11116 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11117 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11118
11119 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11120 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11121
11122 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11123 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11124
11125 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11126 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11127
11128 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11129 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11130
11131 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11132 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11133
11134 /*flash*/
11135 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11136 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11137
11138 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11139 settings.update(ANDROID_FLASH_FIRING_POWER,
11140 &flashFiringLevel, 1);
11141
11142 /* lens */
11143 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11144 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11145
11146 if (gCamCapability[mCameraId]->filter_densities_count) {
11147 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11148 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11149 gCamCapability[mCameraId]->filter_densities_count);
11150 }
11151
11152 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11153 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11154
Thierry Strudel3d639192016-09-09 11:52:26 -070011155 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11156 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11157
11158 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11159 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11160
11161 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11162 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11163
11164 /* face detection (default to OFF) */
11165 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11166 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11167
Thierry Strudel54dc9782017-02-15 12:12:10 -080011168 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11169 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011170
11171 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11172 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11173
11174 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11175 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11176
Thierry Strudel3d639192016-09-09 11:52:26 -070011177
11178 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11179 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11180
11181 /* Exposure time (default to the minimum supported exposure time) */
11182 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11183 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11184
11185 /* frame duration */
11186 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11187 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11188
11189 /* sensitivity */
11190 static const int32_t default_sensitivity = 100;
11191 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011192#ifndef USE_HAL_3_3
11193 static const int32_t default_isp_sensitivity =
11194 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11195 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11196#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011197
11198 /*edge mode*/
11199 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11200
11201 /*noise reduction mode*/
11202 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11203
11204 /*color correction mode*/
11205 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11206 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11207
11208 /*tonemap mode*/
11209 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11210
11211 int32_t scaler_crop_region[4];
11212 scaler_crop_region[0] = 0;
11213 scaler_crop_region[1] = 0;
11214 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11215 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11216 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11217
11218 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11219 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11220
11221 /*focus distance*/
11222 float focus_distance = 0.0;
11223 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11224
11225 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011226 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011227 float max_range = 0.0;
11228 float max_fixed_fps = 0.0;
11229 int32_t fps_range[2] = {0, 0};
11230 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11231 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011232 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11233 TEMPLATE_MAX_PREVIEW_FPS) {
11234 continue;
11235 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011236 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11237 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11238 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11239 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11240 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11241 if (range > max_range) {
11242 fps_range[0] =
11243 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11244 fps_range[1] =
11245 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11246 max_range = range;
11247 }
11248 } else {
11249 if (range < 0.01 && max_fixed_fps <
11250 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11251 fps_range[0] =
11252 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11253 fps_range[1] =
11254 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11255 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11256 }
11257 }
11258 }
11259 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
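// Worked example (not part of the original HAL code, table hypothetical): given fps ranges
// {7.5, 30}, {15, 15}, {30, 30} and {30, 60}, the {30, 60} entry is skipped because its
// max_fps exceeds TEMPLATE_MAX_PREVIEW_FPS. Preview/still/ZSL templates then pick the widest
// remaining range, {7.5, 30}; video templates pick the highest fixed range, {30, 30}.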
11260
11261 /*precapture trigger*/
11262 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11263 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11264
11265 /*af trigger*/
11266 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11267 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11268
11269 /* ae & af regions */
11270 int32_t active_region[] = {
11271 gCamCapability[mCameraId]->active_array_size.left,
11272 gCamCapability[mCameraId]->active_array_size.top,
11273 gCamCapability[mCameraId]->active_array_size.left +
11274 gCamCapability[mCameraId]->active_array_size.width,
11275 gCamCapability[mCameraId]->active_array_size.top +
11276 gCamCapability[mCameraId]->active_array_size.height,
11277 0};
11278 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11279 sizeof(active_region) / sizeof(active_region[0]));
11280 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11281 sizeof(active_region) / sizeof(active_region[0]));
11282
11283 /* black level lock */
11284 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11285 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11286
Thierry Strudel3d639192016-09-09 11:52:26 -070011287 //special defaults for manual template
11288 if (type == CAMERA3_TEMPLATE_MANUAL) {
11289 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11290 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11291
11292 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11293 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11294
11295 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11296 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11297
11298 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11299 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11300
11301 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11302 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11303
11304 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11305 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11306 }
11307
11308
11309 /* TNR
11310 * This is where we decide for which templates TNR will be set.
11311 * TNR is enabled if either the preview or the video stream requires it.
11312 * This is not to be confused with per-stream linking; that decision
11313 * is still made per session and is handled as part of stream configuration.
11314 */
11315 uint8_t tnr_enable = 0;
11316
11317 if (m_bTnrPreview || m_bTnrVideo) {
11318
11319 switch (type) {
11320 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11321 tnr_enable = 1;
11322 break;
11323
11324 default:
11325 tnr_enable = 0;
11326 break;
11327 }
11328
11329 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11330 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11331 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11332
11333 LOGD("TNR:%d with process plate %d for template:%d",
11334 tnr_enable, tnr_process_type, type);
11335 }
11336
11337 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011338 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011339 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11340
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011341 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011342 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11343
Shuzhen Wang920ea402017-05-03 08:49:39 -070011344 uint8_t related_camera_id = mCameraId;
11345 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011346
11347 /* CDS default */
11348 char prop[PROPERTY_VALUE_MAX];
11349 memset(prop, 0, sizeof(prop));
11350 property_get("persist.camera.CDS", prop, "Auto");
11351 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11352 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11353 if (CAM_CDS_MODE_MAX == cds_mode) {
11354 cds_mode = CAM_CDS_MODE_AUTO;
11355 }
11356
11357 /* Disabling CDS in templates which have TNR enabled*/
11358 if (tnr_enable)
11359 cds_mode = CAM_CDS_MODE_OFF;
11360
11361 int32_t mode = cds_mode;
11362 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011363
Thierry Strudel269c81a2016-10-12 12:13:59 -070011364 /* Manual Convergence AEC Speed is disabled by default*/
11365 float default_aec_speed = 0;
11366 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11367
11368 /* Manual Convergence AWB Speed is disabled by default*/
11369 float default_awb_speed = 0;
11370 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11371
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011372 // Set instant AEC to normal convergence by default
11373 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11374 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11375
Shuzhen Wang19463d72016-03-08 11:09:52 -080011376 /* hybrid ae */
11377 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11378
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011379 if (gExposeEnableZslKey) {
11380 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11381 }
11382
Thierry Strudel3d639192016-09-09 11:52:26 -070011383 mDefaultMetadata[type] = settings.release();
11384
11385 return mDefaultMetadata[type];
11386}
11387
11388/*===========================================================================
11389 * FUNCTION : setFrameParameters
11390 *
11391 * DESCRIPTION: set parameters per frame as requested in the metadata from
11392 * framework
11393 *
11394 * PARAMETERS :
11395 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011396 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011397 * @blob_request: Whether this request is a blob request or not
11398 *
11399 * RETURN : success: NO_ERROR
11400 * failure: non-zero failure code
11401 *==========================================================================*/
11402int QCamera3HardwareInterface::setFrameParameters(
11403 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011404 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011405 int blob_request,
11406 uint32_t snapshotStreamId)
11407{
11408 /*translate from camera_metadata_t type to parm_type_t*/
11409 int rc = 0;
11410 int32_t hal_version = CAM_HAL_V3;
11411
11412 clear_metadata_buffer(mParameters);
11413 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11414 LOGE("Failed to set hal version in the parameters");
11415 return BAD_VALUE;
11416 }
11417
11418 /*we need to update the frame number in the parameters*/
11419 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11420 request->frame_number)) {
11421 LOGE("Failed to set the frame number in the parameters");
11422 return BAD_VALUE;
11423 }
11424
11425 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011427 LOGE("Failed to set stream type mask in the parameters");
11428 return BAD_VALUE;
11429 }
11430
11431 if (mUpdateDebugLevel) {
11432 uint32_t dummyDebugLevel = 0;
11433 /* The value of dummyDebugLevel is irrelevant. On
11434 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11436 dummyDebugLevel)) {
11437 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11438 return BAD_VALUE;
11439 }
11440 mUpdateDebugLevel = false;
11441 }
11442
11443 if(request->settings != NULL){
11444 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11445 if (blob_request)
11446 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11447 }
11448
11449 return rc;
11450}
11451
11452/*===========================================================================
11453 * FUNCTION : setReprocParameters
11454 *
11455 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11456 * return it.
11457 *
11458 * PARAMETERS :
11459 * @request : request that needs to be serviced
11460 *
11461 * RETURN : success: NO_ERROR
11462 * failure: non-zero failure code
11463 *==========================================================================*/
11464int32_t QCamera3HardwareInterface::setReprocParameters(
11465 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11466 uint32_t snapshotStreamId)
11467{
11468 /*translate from camera_metadata_t type to parm_type_t*/
11469 int rc = 0;
11470
11471 if (NULL == request->settings){
11472 LOGE("Reprocess settings cannot be NULL");
11473 return BAD_VALUE;
11474 }
11475
11476 if (NULL == reprocParam) {
11477 LOGE("Invalid reprocessing metadata buffer");
11478 return BAD_VALUE;
11479 }
11480 clear_metadata_buffer(reprocParam);
11481
11482 /*we need to update the frame number in the parameters*/
11483 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11484 request->frame_number)) {
11485 LOGE("Failed to set the frame number in the parameters");
11486 return BAD_VALUE;
11487 }
11488
11489 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11490 if (rc < 0) {
11491 LOGE("Failed to translate reproc request");
11492 return rc;
11493 }
11494
11495 CameraMetadata frame_settings;
11496 frame_settings = request->settings;
11497 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11498 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11499 int32_t *crop_count =
11500 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11501 int32_t *crop_data =
11502 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11503 int32_t *roi_map =
11504 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11505 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11506 cam_crop_data_t crop_meta;
11507 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11508 crop_meta.num_of_streams = 1;
11509 crop_meta.crop_info[0].crop.left = crop_data[0];
11510 crop_meta.crop_info[0].crop.top = crop_data[1];
11511 crop_meta.crop_info[0].crop.width = crop_data[2];
11512 crop_meta.crop_info[0].crop.height = crop_data[3];
11513
11514 crop_meta.crop_info[0].roi_map.left =
11515 roi_map[0];
11516 crop_meta.crop_info[0].roi_map.top =
11517 roi_map[1];
11518 crop_meta.crop_info[0].roi_map.width =
11519 roi_map[2];
11520 crop_meta.crop_info[0].roi_map.height =
11521 roi_map[3];
11522
11523 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11524 rc = BAD_VALUE;
11525 }
11526 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11527 request->input_buffer->stream,
11528 crop_meta.crop_info[0].crop.left,
11529 crop_meta.crop_info[0].crop.top,
11530 crop_meta.crop_info[0].crop.width,
11531 crop_meta.crop_info[0].crop.height);
11532 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11533 request->input_buffer->stream,
11534 crop_meta.crop_info[0].roi_map.left,
11535 crop_meta.crop_info[0].roi_map.top,
11536 crop_meta.crop_info[0].roi_map.width,
11537 crop_meta.crop_info[0].roi_map.height);
11538 } else {
11539 LOGE("Invalid reprocess crop count %d!", *crop_count);
11540 }
11541 } else {
11542 LOGE("No crop data from matching output stream");
11543 }
11544
11545 /* These settings are not needed for regular requests so handle them specially for
11546 reprocess requests; information needed for EXIF tags */
11547 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11548 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11549 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11550 if (NAME_NOT_FOUND != val) {
11551 uint32_t flashMode = (uint32_t)val;
11552 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11553 rc = BAD_VALUE;
11554 }
11555 } else {
11556 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11557 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11558 }
11559 } else {
11560 LOGH("No flash mode in reprocess settings");
11561 }
11562
11563 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11564 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11565 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11566 rc = BAD_VALUE;
11567 }
11568 } else {
11569 LOGH("No flash state in reprocess settings");
11570 }
11571
11572 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11573 uint8_t *reprocessFlags =
11574 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11576 *reprocessFlags)) {
11577 rc = BAD_VALUE;
11578 }
11579 }
11580
Thierry Strudel54dc9782017-02-15 12:12:10 -080011581 // Add exif debug data to internal metadata
11582 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11583 mm_jpeg_debug_exif_params_t *debug_params =
11584 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11585 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11586 // AE
11587 if (debug_params->ae_debug_params_valid == TRUE) {
11588 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11589 debug_params->ae_debug_params);
11590 }
11591 // AWB
11592 if (debug_params->awb_debug_params_valid == TRUE) {
11593 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11594 debug_params->awb_debug_params);
11595 }
11596 // AF
11597 if (debug_params->af_debug_params_valid == TRUE) {
11598 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11599 debug_params->af_debug_params);
11600 }
11601 // ASD
11602 if (debug_params->asd_debug_params_valid == TRUE) {
11603 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11604 debug_params->asd_debug_params);
11605 }
11606 // Stats
11607 if (debug_params->stats_debug_params_valid == TRUE) {
11608 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11609 debug_params->stats_debug_params);
11610 }
11611 // BE Stats
11612 if (debug_params->bestats_debug_params_valid == TRUE) {
11613 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11614 debug_params->bestats_debug_params);
11615 }
11616 // BHIST
11617 if (debug_params->bhist_debug_params_valid == TRUE) {
11618 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11619 debug_params->bhist_debug_params);
11620 }
11621 // 3A Tuning
11622 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11623 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11624 debug_params->q3a_tuning_debug_params);
11625 }
11626 }
11627
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011628 // Add metadata which reprocess needs
11629 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11630 cam_reprocess_info_t *repro_info =
11631 (cam_reprocess_info_t *)frame_settings.find
11632 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011633 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011634 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011635 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011636 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011637 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011638 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011639 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011640 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011641 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011642 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011643 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011644 repro_info->pipeline_flip);
11645 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11646 repro_info->af_roi);
11647 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11648 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011649        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11650            the CAM_INTF_PARM_ROTATION metadata has already been added in
11651            translateToHalMetadata. The HAL needs to keep this new rotation
11652            metadata. Otherwise, the old rotation info saved in the vendor tag
11653            would be used */
11654 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11655 CAM_INTF_PARM_ROTATION, reprocParam) {
11656 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11657 } else {
11658 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011659 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011660 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011661 }
11662
11663    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11664       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11665       roi.width and roi.height would be the final JPEG size.
11666       For now, the HAL only checks this for reprocess requests; see the illustrative sketch below. */
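    /* Illustrative sketch (not part of the HAL code path): roughly how an application could
     * request this crop through the vendor tags checked below. The tag names come from this
     * file; the surrounding app-side code and the values are assumptions.
     *
     *   uint8_t cropEnable = 1;
     *   int32_t cropRect[4] = {0, 0, 3264, 2448};   // left, top, width, height (assumed)
     *   int32_t cropRoi[4]  = {0, 0, 1920, 1080};   // final JPEG size after scaling (assumed)
     *   requestSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &cropEnable, 1);
     *   requestSettings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
     *   requestSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
     */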
11667 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11668 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11669 uint8_t *enable =
11670 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11671 if (*enable == TRUE) {
11672 int32_t *crop_data =
11673 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11674 cam_stream_crop_info_t crop_meta;
11675 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11676 crop_meta.stream_id = 0;
11677 crop_meta.crop.left = crop_data[0];
11678 crop_meta.crop.top = crop_data[1];
11679 crop_meta.crop.width = crop_data[2];
11680 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011681 // The JPEG crop roi should match cpp output size
11682 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11683 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11684 crop_meta.roi_map.left = 0;
11685 crop_meta.roi_map.top = 0;
11686 crop_meta.roi_map.width = cpp_crop->crop.width;
11687 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011688 }
11689 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11690 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011691 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011692 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011693 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11694 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011695 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011696 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11697
11698 // Add JPEG scale information
11699 cam_dimension_t scale_dim;
11700 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11701 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11702 int32_t *roi =
11703 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11704 scale_dim.width = roi[2];
11705 scale_dim.height = roi[3];
11706 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11707 scale_dim);
11708 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11709 scale_dim.width, scale_dim.height, mCameraId);
11710 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011711 }
11712 }
11713
11714 return rc;
11715}
11716
11717/*===========================================================================
11718 * FUNCTION : saveRequestSettings
11719 *
11720 * DESCRIPTION: Add any settings that might have changed to the request settings
11721 * and save the settings to be applied on the frame
11722 *
11723 * PARAMETERS :
11724 * @jpegMetadata : the extracted and/or modified jpeg metadata
11725 * @request : request with initial settings
11726 *
11727 * RETURN :
11728 * camera_metadata_t* : pointer to the saved request settings
11729 *==========================================================================*/
11730camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11731 const CameraMetadata &jpegMetadata,
11732 camera3_capture_request_t *request)
11733{
11734 camera_metadata_t *resultMetadata;
11735 CameraMetadata camMetadata;
11736 camMetadata = request->settings;
11737
11738 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11739 int32_t thumbnail_size[2];
11740 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11741 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11742 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11743 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11744 }
11745
11746 if (request->input_buffer != NULL) {
11747 uint8_t reprocessFlags = 1;
11748 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11749 (uint8_t*)&reprocessFlags,
11750 sizeof(reprocessFlags));
11751 }
11752
11753 resultMetadata = camMetadata.release();
11754 return resultMetadata;
11755}
11756
11757/*===========================================================================
11758 * FUNCTION : setHalFpsRange
11759 *
11760 * DESCRIPTION: set FPS range parameter
11761 *
11762 *
11763 * PARAMETERS :
11764 * @settings : Metadata from framework
11765 * @hal_metadata: Metadata buffer
11766 *
11767 *
11768 * RETURN : success: NO_ERROR
11769 *              failure: BAD_VALUE
11770 *==========================================================================*/
11771int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11772 metadata_buffer_t *hal_metadata)
11773{
11774 int32_t rc = NO_ERROR;
11775 cam_fps_range_t fps_range;
11776 fps_range.min_fps = (float)
11777 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11778 fps_range.max_fps = (float)
11779 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11780 fps_range.video_min_fps = fps_range.min_fps;
11781 fps_range.video_max_fps = fps_range.max_fps;
11782
11783 LOGD("aeTargetFpsRange fps: [%f %f]",
11784 fps_range.min_fps, fps_range.max_fps);
11785 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11786 * follows:
11787 * ---------------------------------------------------------------|
11788 * Video stream is absent in configure_streams |
11789     *   (Camcorder preview before the first video record)              |
11790 * ---------------------------------------------------------------|
11791 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11792 * | | | vid_min/max_fps|
11793 * ---------------------------------------------------------------|
11794 * NO | [ 30, 240] | 240 | [240, 240] |
11795 * |-------------|-------------|----------------|
11796 * | [240, 240] | 240 | [240, 240] |
11797 * ---------------------------------------------------------------|
11798 * Video stream is present in configure_streams |
11799 * ---------------------------------------------------------------|
11800 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11801 * | | | vid_min/max_fps|
11802 * ---------------------------------------------------------------|
11803 * NO | [ 30, 240] | 240 | [240, 240] |
11804 * (camcorder prev |-------------|-------------|----------------|
11805 * after video rec | [240, 240] | 240 | [240, 240] |
11806 * is stopped) | | | |
11807 * ---------------------------------------------------------------|
11808 * YES | [ 30, 240] | 240 | [240, 240] |
11809 * |-------------|-------------|----------------|
11810 * | [240, 240] | 240 | [240, 240] |
11811 * ---------------------------------------------------------------|
11812 * When Video stream is absent in configure_streams,
11813 * preview fps = sensor_fps / batchsize
11814 * Eg: for 240fps at batchSize 4, preview = 60fps
11815 * for 120fps at batchSize 4, preview = 30fps
11816 *
11817 * When video stream is present in configure_streams, preview fps is as per
11818 * the ratio of preview buffers to video buffers requested in process
11819 * capture request
11820 */
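    /* Worked example (illustrative; the PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE values are
     * assumptions): with an aeTargetFpsRange of [240, 240] and PREVIEW_FPS_FOR_HFR = 30,
     * mBatchSize below evaluates to 240 / 30 = 8 (subject to the MAX_HFR_BATCH_SIZE clamp),
     * so the preview effectively runs at sensor_fps / batchSize = 240 / 8 = 30fps. */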
11821 mBatchSize = 0;
11822 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11823 fps_range.min_fps = fps_range.video_max_fps;
11824 fps_range.video_min_fps = fps_range.video_max_fps;
11825 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11826 fps_range.max_fps);
11827 if (NAME_NOT_FOUND != val) {
11828 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11829 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11830 return BAD_VALUE;
11831 }
11832
11833 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11834 /* If batchmode is currently in progress and the fps changes,
11835 * set the flag to restart the sensor */
11836 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11837 (mHFRVideoFps != fps_range.max_fps)) {
11838 mNeedSensorRestart = true;
11839 }
11840 mHFRVideoFps = fps_range.max_fps;
11841 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11842 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11843 mBatchSize = MAX_HFR_BATCH_SIZE;
11844 }
11845 }
11846 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11847
11848 }
11849 } else {
11850        /* HFR mode is a session parameter in the backend/ISP. It should be reset
11851         * when not in HFR mode */
11852 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11854 return BAD_VALUE;
11855 }
11856 }
11857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11858 return BAD_VALUE;
11859 }
11860 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11861 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11862 return rc;
11863}
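/* Usage note (illustrative): for a regular session with aeTargetFpsRange = [30, 30], the
 * resulting cam_fps_range_t is {30, 30, 30, 30}. In CONSTRAINED_HIGH_SPEED mode with
 * aeTargetFpsRange = [30, 240], min_fps and video_min_fps are raised to video_max_fps,
 * yielding {240, 240, 240, 240}, consistent with the table in the function above. */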
11864
11865/*===========================================================================
11866 * FUNCTION : translateToHalMetadata
11867 *
11868 * DESCRIPTION: read settings from the camera_metadata_t and translate them to parm_type_t entries
11869 *
11870 *
11871 * PARAMETERS :
11872 * @request : request sent from framework
11873 *
11874 *
11875 * RETURN : success: NO_ERROR
11876 *              failure: BAD_VALUE
11877 *==========================================================================*/
11878int QCamera3HardwareInterface::translateToHalMetadata
11879 (const camera3_capture_request_t *request,
11880 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011881 uint32_t snapshotStreamId) {
11882 if (request == nullptr || hal_metadata == nullptr) {
11883 return BAD_VALUE;
11884 }
11885
11886 int64_t minFrameDuration = getMinFrameDuration(request);
11887
11888 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11889 minFrameDuration);
11890}
11891
11892int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11893 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11894 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11895
Thierry Strudel3d639192016-09-09 11:52:26 -070011896 int rc = 0;
11897 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011898 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011899
11900 /* Do not change the order of the following list unless you know what you are
11901 * doing.
11902 * The order is laid out in such a way that parameters in the front of the table
11903 * may be used to override the parameters later in the table. Examples are:
11904 * 1. META_MODE should precede AEC/AWB/AF MODE
11905     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11906     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11907     * 4. Any mode should precede its corresponding settings
11908 */
11909 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11910 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11911 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11912 rc = BAD_VALUE;
11913 }
11914 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11915 if (rc != NO_ERROR) {
11916 LOGE("extractSceneMode failed");
11917 }
11918 }
11919
11920 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11921 uint8_t fwk_aeMode =
11922 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11923 uint8_t aeMode;
11924 int32_t redeye;
11925
11926 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11927 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011928 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11929 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011930 } else {
11931 aeMode = CAM_AE_MODE_ON;
11932 }
11933 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11934 redeye = 1;
11935 } else {
11936 redeye = 0;
11937 }
11938
11939 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11940 fwk_aeMode);
11941 if (NAME_NOT_FOUND != val) {
11942 int32_t flashMode = (int32_t)val;
11943 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11944 }
11945
11946 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11948 rc = BAD_VALUE;
11949 }
11950 }
11951
11952 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11953 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11954 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11955 fwk_whiteLevel);
11956 if (NAME_NOT_FOUND != val) {
11957 uint8_t whiteLevel = (uint8_t)val;
11958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11959 rc = BAD_VALUE;
11960 }
11961 }
11962 }
11963
11964 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11965 uint8_t fwk_cacMode =
11966 frame_settings.find(
11967 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11968 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11969 fwk_cacMode);
11970 if (NAME_NOT_FOUND != val) {
11971 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11972 bool entryAvailable = FALSE;
11973 // Check whether Frameworks set CAC mode is supported in device or not
11974 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11975 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11976 entryAvailable = TRUE;
11977 break;
11978 }
11979 }
11980 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11981            // If the entry is not found, set a device-supported mode instead of the frameworks mode, i.e.:
11982            // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST via the ISP
11983            // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
11984 if (entryAvailable == FALSE) {
11985 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11986 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11987 } else {
11988 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11989                        // HIGH is not supported, so fall back to FAST since the spec says the
11990                        // underlying device implementation can be the same for both modes.
11991 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11992 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11993                        // FAST is not supported, so neither HIGH nor FAST can be set; choose OFF
11994                        // to avoid the fps drop that high quality would cause
11995 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11996 } else {
11997 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11998 }
11999 }
12000 }
12001 LOGD("Final cacMode is %d", cacMode);
12002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12003 rc = BAD_VALUE;
12004 }
12005 } else {
12006 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12007 }
12008 }
12009
Thierry Strudel2896d122017-02-23 19:18:03 -080012010 char af_value[PROPERTY_VALUE_MAX];
12011 property_get("persist.camera.af.infinity", af_value, "0");
12012
Jason Lee84ae9972017-02-24 13:24:24 -080012013 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080012014 if (atoi(af_value) == 0) {
12015 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012016 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012017 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12018 fwk_focusMode);
12019 if (NAME_NOT_FOUND != val) {
12020 uint8_t focusMode = (uint8_t)val;
12021 LOGD("set focus mode %d", focusMode);
12022 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12023 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12024 rc = BAD_VALUE;
12025 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012026 }
12027 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012028 } else {
12029 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12030 LOGE("Focus forced to infinity %d", focusMode);
12031 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12032 rc = BAD_VALUE;
12033 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012034 }
12035
Jason Lee84ae9972017-02-24 13:24:24 -080012036 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12037 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012038 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12039 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12040 focalDistance)) {
12041 rc = BAD_VALUE;
12042 }
12043 }
12044
12045 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12046 uint8_t fwk_antibandingMode =
12047 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12048 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12049 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12050 if (NAME_NOT_FOUND != val) {
12051 uint32_t hal_antibandingMode = (uint32_t)val;
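            // AUTO antibanding is resolved here to a region-specific mode: 60Hz zones get
            // CAM_ANTIBANDING_MODE_AUTO_60HZ, everywhere else gets AUTO_50HZ (see m60HzZone).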
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012052 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12053 if (m60HzZone) {
12054 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12055 } else {
12056 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12057 }
12058 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012059 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12060 hal_antibandingMode)) {
12061 rc = BAD_VALUE;
12062 }
12063 }
12064 }
12065
12066 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12067 int32_t expCompensation = frame_settings.find(
12068 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12069 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12070 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12071 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12072 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012073 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012074 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12075 expCompensation)) {
12076 rc = BAD_VALUE;
12077 }
12078 }
12079
12080 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12081 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12082 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12083 rc = BAD_VALUE;
12084 }
12085 }
12086 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12087 rc = setHalFpsRange(frame_settings, hal_metadata);
12088 if (rc != NO_ERROR) {
12089 LOGE("setHalFpsRange failed");
12090 }
12091 }
12092
12093 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12094 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12096 rc = BAD_VALUE;
12097 }
12098 }
12099
12100 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12101 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12102 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12103 fwk_effectMode);
12104 if (NAME_NOT_FOUND != val) {
12105 uint8_t effectMode = (uint8_t)val;
12106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12107 rc = BAD_VALUE;
12108 }
12109 }
12110 }
12111
12112 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12113 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12115 colorCorrectMode)) {
12116 rc = BAD_VALUE;
12117 }
12118 }
12119
12120 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12121 cam_color_correct_gains_t colorCorrectGains;
12122 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12123 colorCorrectGains.gains[i] =
12124 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12125 }
12126 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12127 colorCorrectGains)) {
12128 rc = BAD_VALUE;
12129 }
12130 }
12131
12132 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12133 cam_color_correct_matrix_t colorCorrectTransform;
12134 cam_rational_type_t transform_elem;
12135 size_t num = 0;
12136 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12137 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12138 transform_elem.numerator =
12139 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12140 transform_elem.denominator =
12141 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12142 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12143 num++;
12144 }
12145 }
12146 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12147 colorCorrectTransform)) {
12148 rc = BAD_VALUE;
12149 }
12150 }
12151
12152 cam_trigger_t aecTrigger;
12153 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12154 aecTrigger.trigger_id = -1;
12155 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12156 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12157 aecTrigger.trigger =
12158 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12159 aecTrigger.trigger_id =
12160 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12162 aecTrigger)) {
12163 rc = BAD_VALUE;
12164 }
12165 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12166 aecTrigger.trigger, aecTrigger.trigger_id);
12167 }
12168
12169 /*af_trigger must come with a trigger id*/
12170 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12171 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12172 cam_trigger_t af_trigger;
12173 af_trigger.trigger =
12174 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12175 af_trigger.trigger_id =
12176 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12177 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12178 rc = BAD_VALUE;
12179 }
12180 LOGD("AfTrigger: %d AfTriggerID: %d",
12181 af_trigger.trigger, af_trigger.trigger_id);
12182 }
12183
12184 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12185 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12186 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12187 rc = BAD_VALUE;
12188 }
12189 }
12190 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12191 cam_edge_application_t edge_application;
12192 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012193
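        // Sharpness handling below: OFF forces sharpness to 0; otherwise start from the
        // capability default and override it with QCAMERA3_SHARPNESS_STRENGTH only when the
        // requested value lies within the advertised [min, max] range.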
Thierry Strudel3d639192016-09-09 11:52:26 -070012194 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12195 edge_application.sharpness = 0;
12196 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012197 edge_application.sharpness =
12198 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12199 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12200 int32_t sharpness =
12201 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12202 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12203 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12204 LOGD("Setting edge mode sharpness %d", sharpness);
12205 edge_application.sharpness = sharpness;
12206 }
12207 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012208 }
12209 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12210 rc = BAD_VALUE;
12211 }
12212 }
12213
12214 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12215 int32_t respectFlashMode = 1;
12216 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12217 uint8_t fwk_aeMode =
12218 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012219 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12220 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12221 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012222 respectFlashMode = 0;
12223 LOGH("AE Mode controls flash, ignore android.flash.mode");
12224 }
12225 }
12226 if (respectFlashMode) {
12227 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12228 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12229 LOGH("flash mode after mapping %d", val);
12230 // To check: CAM_INTF_META_FLASH_MODE usage
12231 if (NAME_NOT_FOUND != val) {
12232 uint8_t flashMode = (uint8_t)val;
12233 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12234 rc = BAD_VALUE;
12235 }
12236 }
12237 }
12238 }
12239
12240 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12241 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12243 rc = BAD_VALUE;
12244 }
12245 }
12246
12247 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12248 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12250 flashFiringTime)) {
12251 rc = BAD_VALUE;
12252 }
12253 }
12254
12255 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12256 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12257 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12258 hotPixelMode)) {
12259 rc = BAD_VALUE;
12260 }
12261 }
12262
12263 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12264 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12266 lensAperture)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
12270
12271 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12272 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12274 filterDensity)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278
12279 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12280 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12282 focalLength)) {
12283 rc = BAD_VALUE;
12284 }
12285 }
12286
12287 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12288 uint8_t optStabMode =
12289 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12290 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12291 optStabMode)) {
12292 rc = BAD_VALUE;
12293 }
12294 }
12295
12296 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12297 uint8_t videoStabMode =
12298 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12299 LOGD("videoStabMode from APP = %d", videoStabMode);
12300 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12301 videoStabMode)) {
12302 rc = BAD_VALUE;
12303 }
12304 }
12305
12306
12307 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12308 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12309 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12310 noiseRedMode)) {
12311 rc = BAD_VALUE;
12312 }
12313 }
12314
12315 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12316 float reprocessEffectiveExposureFactor =
12317 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12318 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12319 reprocessEffectiveExposureFactor)) {
12320 rc = BAD_VALUE;
12321 }
12322 }
12323
12324 cam_crop_region_t scalerCropRegion;
12325 bool scalerCropSet = false;
12326 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12327 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12328 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12329 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12330 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12331
12332 // Map coordinate system from active array to sensor output.
12333 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12334 scalerCropRegion.width, scalerCropRegion.height);
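        // Illustrative mapping (values assumed): if the active pixel array is 4000x3000 and the
        // current sensor output is 2000x1500, a crop of (1000, 750, 2000, 1500) in active-array
        // coordinates would map to roughly (500, 375, 1000, 750) in sensor coordinates.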
12335
12336 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12337 scalerCropRegion)) {
12338 rc = BAD_VALUE;
12339 }
12340 scalerCropSet = true;
12341 }
12342
12343 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12344 int64_t sensorExpTime =
12345 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12346 LOGD("setting sensorExpTime %lld", sensorExpTime);
12347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12348 sensorExpTime)) {
12349 rc = BAD_VALUE;
12350 }
12351 }
12352
12353 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12354 int64_t sensorFrameDuration =
12355 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012356 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12357 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12358 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12359 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12360 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12361 sensorFrameDuration)) {
12362 rc = BAD_VALUE;
12363 }
12364 }
12365
12366 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12367 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12368 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12369 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12370 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12371 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12372 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12373 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12374 sensorSensitivity)) {
12375 rc = BAD_VALUE;
12376 }
12377 }
12378
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012379#ifndef USE_HAL_3_3
12380 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12381 int32_t ispSensitivity =
12382 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12383 if (ispSensitivity <
12384 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12385 ispSensitivity =
12386 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12387 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12388 }
12389 if (ispSensitivity >
12390 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12391 ispSensitivity =
12392 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12393 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12394 }
12395 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12396 ispSensitivity)) {
12397 rc = BAD_VALUE;
12398 }
12399 }
12400#endif
12401
Thierry Strudel3d639192016-09-09 11:52:26 -070012402 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12403 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12404 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12405 rc = BAD_VALUE;
12406 }
12407 }
12408
12409 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12410 uint8_t fwk_facedetectMode =
12411 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12412
12413 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12414 fwk_facedetectMode);
12415
12416 if (NAME_NOT_FOUND != val) {
12417 uint8_t facedetectMode = (uint8_t)val;
12418 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12419 facedetectMode)) {
12420 rc = BAD_VALUE;
12421 }
12422 }
12423 }
12424
Thierry Strudel54dc9782017-02-15 12:12:10 -080012425 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012426 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012427 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012428 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12429 histogramMode)) {
12430 rc = BAD_VALUE;
12431 }
12432 }
12433
12434 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12435 uint8_t sharpnessMapMode =
12436 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12437 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12438 sharpnessMapMode)) {
12439 rc = BAD_VALUE;
12440 }
12441 }
12442
12443 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12444 uint8_t tonemapMode =
12445 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12446 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12447 rc = BAD_VALUE;
12448 }
12449 }
12450 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12451 /*All tonemap channels will have the same number of points*/
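    /* Each framework tonemap curve arrives as a flattened list of (Pin, Pout) pairs, so the
     * per-channel point count below is the curve entry's count / 2. For example (values
     * assumed), a green curve of [0.0, 0.0, 0.5, 0.6, 1.0, 1.0] describes the three points
     * (0.0, 0.0), (0.5, 0.6) and (1.0, 1.0). */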
12452 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12453 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12454 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12455 cam_rgb_tonemap_curves tonemapCurves;
12456 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12457 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12458 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12459 tonemapCurves.tonemap_points_cnt,
12460 CAM_MAX_TONEMAP_CURVE_SIZE);
12461 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12462 }
12463
12464 /* ch0 = G*/
12465 size_t point = 0;
12466 cam_tonemap_curve_t tonemapCurveGreen;
12467 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12468 for (size_t j = 0; j < 2; j++) {
12469 tonemapCurveGreen.tonemap_points[i][j] =
12470 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12471 point++;
12472 }
12473 }
12474 tonemapCurves.curves[0] = tonemapCurveGreen;
12475
12476 /* ch 1 = B */
12477 point = 0;
12478 cam_tonemap_curve_t tonemapCurveBlue;
12479 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12480 for (size_t j = 0; j < 2; j++) {
12481 tonemapCurveBlue.tonemap_points[i][j] =
12482 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12483 point++;
12484 }
12485 }
12486 tonemapCurves.curves[1] = tonemapCurveBlue;
12487
12488 /* ch 2 = R */
12489 point = 0;
12490 cam_tonemap_curve_t tonemapCurveRed;
12491 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12492 for (size_t j = 0; j < 2; j++) {
12493 tonemapCurveRed.tonemap_points[i][j] =
12494 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12495 point++;
12496 }
12497 }
12498 tonemapCurves.curves[2] = tonemapCurveRed;
12499
12500 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12501 tonemapCurves)) {
12502 rc = BAD_VALUE;
12503 }
12504 }
12505
12506 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12507 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12508 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12509 captureIntent)) {
12510 rc = BAD_VALUE;
12511 }
12512 }
12513
12514 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12515 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12516 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12517 blackLevelLock)) {
12518 rc = BAD_VALUE;
12519 }
12520 }
12521
12522 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12523 uint8_t lensShadingMapMode =
12524 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12525 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12526 lensShadingMapMode)) {
12527 rc = BAD_VALUE;
12528 }
12529 }
12530
12531 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12532 cam_area_t roi;
12533 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012534 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012535
12536 // Map coordinate system from active array to sensor output.
12537 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12538 roi.rect.height);
12539
12540 if (scalerCropSet) {
12541 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12542 }
12543 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12544 rc = BAD_VALUE;
12545 }
12546 }
12547
12548 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12549 cam_area_t roi;
12550 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012551 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012552
12553 // Map coordinate system from active array to sensor output.
12554 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12555 roi.rect.height);
12556
12557 if (scalerCropSet) {
12558 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12559 }
12560 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12561 rc = BAD_VALUE;
12562 }
12563 }
12564
12565 // CDS for non-HFR non-video mode
12566 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12567 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12568 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12569 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12570 LOGE("Invalid CDS mode %d!", *fwk_cds);
12571 } else {
12572 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12573 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12574 rc = BAD_VALUE;
12575 }
12576 }
12577 }
12578
Thierry Strudel04e026f2016-10-10 11:27:36 -070012579 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012580 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012581 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012582 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12583 }
12584 if (m_bVideoHdrEnabled)
12585 vhdr = CAM_VIDEO_HDR_MODE_ON;
12586
Thierry Strudel54dc9782017-02-15 12:12:10 -080012587 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12588
12589 if(vhdr != curr_hdr_state)
12590 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12591
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012592 rc = setVideoHdrMode(mParameters, vhdr);
12593 if (rc != NO_ERROR) {
12594        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012595 }
12596
12597 //IR
12598 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12599 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12600 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012601 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12602 uint8_t isIRon = 0;
12603
12604        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012605 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12606 LOGE("Invalid IR mode %d!", fwk_ir);
12607 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012608 if(isIRon != curr_ir_state )
12609 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12610
Thierry Strudel04e026f2016-10-10 11:27:36 -070012611 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12612 CAM_INTF_META_IR_MODE, fwk_ir)) {
12613 rc = BAD_VALUE;
12614 }
12615 }
12616 }
12617
Thierry Strudel54dc9782017-02-15 12:12:10 -080012618 //Binning Correction Mode
12619 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12620 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12621 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12622 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12623 || (0 > fwk_binning_correction)) {
12624 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12625 } else {
12626 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12627 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12628 rc = BAD_VALUE;
12629 }
12630 }
12631 }
12632
Thierry Strudel269c81a2016-10-12 12:13:59 -070012633 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12634 float aec_speed;
12635 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12636 LOGD("AEC Speed :%f", aec_speed);
12637 if ( aec_speed < 0 ) {
12638            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12639 } else {
12640 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12641 aec_speed)) {
12642 rc = BAD_VALUE;
12643 }
12644 }
12645 }
12646
12647 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12648 float awb_speed;
12649 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12650 LOGD("AWB Speed :%f", awb_speed);
12651 if ( awb_speed < 0 ) {
12652            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12653 } else {
12654 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12655 awb_speed)) {
12656 rc = BAD_VALUE;
12657 }
12658 }
12659 }
12660
Thierry Strudel3d639192016-09-09 11:52:26 -070012661 // TNR
12662 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12663 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12664 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012665 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012666 cam_denoise_param_t tnr;
12667 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12668 tnr.process_plates =
12669 (cam_denoise_process_type_t)frame_settings.find(
12670 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12671 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012672
12673 if(b_TnrRequested != curr_tnr_state)
12674 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12675
Thierry Strudel3d639192016-09-09 11:52:26 -070012676 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12677 rc = BAD_VALUE;
12678 }
12679 }
12680
Thierry Strudel54dc9782017-02-15 12:12:10 -080012681 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012682 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012683 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012684 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12685 *exposure_metering_mode)) {
12686 rc = BAD_VALUE;
12687 }
12688 }
12689
Thierry Strudel3d639192016-09-09 11:52:26 -070012690 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12691 int32_t fwk_testPatternMode =
12692 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12693 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12694 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12695
12696 if (NAME_NOT_FOUND != testPatternMode) {
12697 cam_test_pattern_data_t testPatternData;
12698 memset(&testPatternData, 0, sizeof(testPatternData));
12699 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12700 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12701 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12702 int32_t *fwk_testPatternData =
12703 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12704 testPatternData.r = fwk_testPatternData[0];
12705 testPatternData.b = fwk_testPatternData[3];
12706 switch (gCamCapability[mCameraId]->color_arrangement) {
12707 case CAM_FILTER_ARRANGEMENT_RGGB:
12708 case CAM_FILTER_ARRANGEMENT_GRBG:
12709 testPatternData.gr = fwk_testPatternData[1];
12710 testPatternData.gb = fwk_testPatternData[2];
12711 break;
12712 case CAM_FILTER_ARRANGEMENT_GBRG:
12713 case CAM_FILTER_ARRANGEMENT_BGGR:
12714 testPatternData.gr = fwk_testPatternData[2];
12715 testPatternData.gb = fwk_testPatternData[1];
12716 break;
12717 default:
12718 LOGE("color arrangement %d is not supported",
12719 gCamCapability[mCameraId]->color_arrangement);
12720 break;
12721 }
12722 }
12723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12724 testPatternData)) {
12725 rc = BAD_VALUE;
12726 }
12727 } else {
12728 LOGE("Invalid framework sensor test pattern mode %d",
12729 fwk_testPatternMode);
12730 }
12731 }
12732
12733 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12734 size_t count = 0;
12735 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12736 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12737 gps_coords.data.d, gps_coords.count, count);
12738 if (gps_coords.count != count) {
12739 rc = BAD_VALUE;
12740 }
12741 }
12742
12743 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12744 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12745 size_t count = 0;
12746 const char *gps_methods_src = (const char *)
12747 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12748 memset(gps_methods, '\0', sizeof(gps_methods));
12749 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12750 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12751 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12752 if (GPS_PROCESSING_METHOD_SIZE != count) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
12757 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12758 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12760 gps_timestamp)) {
12761 rc = BAD_VALUE;
12762 }
12763 }
12764
12765 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12766 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12767 cam_rotation_info_t rotation_info;
12768 if (orientation == 0) {
12769 rotation_info.rotation = ROTATE_0;
12770 } else if (orientation == 90) {
12771 rotation_info.rotation = ROTATE_90;
12772 } else if (orientation == 180) {
12773 rotation_info.rotation = ROTATE_180;
12774 } else if (orientation == 270) {
12775 rotation_info.rotation = ROTATE_270;
12776 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012777 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012778 rotation_info.streamId = snapshotStreamId;
12779 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12781 rc = BAD_VALUE;
12782 }
12783 }
12784
12785 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12786 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12788 rc = BAD_VALUE;
12789 }
12790 }
12791
12792 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12793 uint32_t thumb_quality = (uint32_t)
12794 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12795 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12796 thumb_quality)) {
12797 rc = BAD_VALUE;
12798 }
12799 }
12800
12801 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12802 cam_dimension_t dim;
12803 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12804 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12805 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12806 rc = BAD_VALUE;
12807 }
12808 }
12809
12810 // Internal metadata
12811 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12812 size_t count = 0;
12813 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12814 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12815 privatedata.data.i32, privatedata.count, count);
12816 if (privatedata.count != count) {
12817 rc = BAD_VALUE;
12818 }
12819 }
12820
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012821 // ISO/Exposure Priority
12822 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12823 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
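        // The 64-bit QCAMERA3_USE_ISO_EXP_PRIORITY value is applied either as a manual ISO
        // (CAM_ISO_PRIORITY) or as a manual exposure time (CAM_EXP_PRIORITY), depending on
        // QCAMERA3_SELECT_PRIORITY; ZSL mode is enabled while such a priority is active and
        // disabled below when the priority tags are absent.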
12824 cam_priority_mode_t mode =
12825 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12826 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12827 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12828 use_iso_exp_pty.previewOnly = FALSE;
12829 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12830 use_iso_exp_pty.value = *ptr;
12831
12832 if(CAM_ISO_PRIORITY == mode) {
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12834 use_iso_exp_pty)) {
12835 rc = BAD_VALUE;
12836 }
12837 }
12838 else {
12839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12840 use_iso_exp_pty)) {
12841 rc = BAD_VALUE;
12842 }
12843 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012844
12845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12846 rc = BAD_VALUE;
12847 }
12848 }
12849 } else {
12850 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12851 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012852 }
12853 }
12854
12855 // Saturation
12856 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12857 int32_t* use_saturation =
12858 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12859 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12860 rc = BAD_VALUE;
12861 }
12862 }
12863
Thierry Strudel3d639192016-09-09 11:52:26 -070012864 // EV step
12865 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12866 gCamCapability[mCameraId]->exp_compensation_step)) {
12867 rc = BAD_VALUE;
12868 }
12869
12870 // CDS info
12871 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12872 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12873 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12874
12875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12876 CAM_INTF_META_CDS_DATA, *cdsData)) {
12877 rc = BAD_VALUE;
12878 }
12879 }
12880
Shuzhen Wang19463d72016-03-08 11:09:52 -080012881 // Hybrid AE
12882 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12883 uint8_t *hybrid_ae = (uint8_t *)
12884 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12885
12886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12887 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12888 rc = BAD_VALUE;
12889 }
12890 }
12891
Shuzhen Wang14415f52016-11-16 18:26:18 -080012892 // Histogram
12893 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12894 uint8_t histogramMode =
12895 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12897 histogramMode)) {
12898 rc = BAD_VALUE;
12899 }
12900 }
12901
12902 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12903 int32_t histogramBins =
12904 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12905 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12906 histogramBins)) {
12907 rc = BAD_VALUE;
12908 }
12909 }
12910
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012911 // Tracking AF
12912 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12913 uint8_t trackingAfTrigger =
12914 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12916 trackingAfTrigger)) {
12917 rc = BAD_VALUE;
12918 }
12919 }
12920
Thierry Strudel3d639192016-09-09 11:52:26 -070012921 return rc;
12922}
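/* Example (illustrative sketch, not part of the build): how an application
 * could request the QCAMERA3_USE_SATURATION vendor control handled above.
 * "requestSettings" is a hypothetical CameraMetadata object on the caller side.
 *
 *   int32_t saturation = 7;                            // device-specific range
 *   requestSettings.update(QCAMERA3_USE_SATURATION, &saturation, 1);
 *   // The translation code above forwards it as CAM_INTF_PARM_SATURATION.
 */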
12923
12924/*===========================================================================
12925 * FUNCTION : captureResultCb
12926 *
12927 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12928 *
12929 * PARAMETERS :
12930 * @frame : frame information from mm-camera-interface
12931 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12932 * @userdata: userdata
12933 *
12934 * RETURN : NONE
12935 *==========================================================================*/
12936void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12937 camera3_stream_buffer_t *buffer,
12938 uint32_t frame_number, bool isInputBuffer, void *userdata)
12939{
12940 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12941 if (hw == NULL) {
12942 LOGE("Invalid hw %p", hw);
12943 return;
12944 }
12945
12946 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12947 return;
12948}
12949
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012950/*===========================================================================
12951 * FUNCTION : setBufferErrorStatus
12952 *
12953 * DESCRIPTION: Callback handler for channels to report any buffer errors
12954 *
12955 * PARAMETERS :
12956 * @ch : Channel on which buffer error is reported from
12957 * @frame_number : frame number on which buffer error is reported on
12958 * @buffer_status : buffer error status
12959 * @userdata: userdata
12960 *
12961 * RETURN : NONE
12962 *==========================================================================*/
12963void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12964 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12965{
12966 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12967 if (hw == NULL) {
12968 LOGE("Invalid hw %p", hw);
12969 return;
12970 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012971
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012972 hw->setBufferErrorStatus(ch, frame_number, err);
12973 return;
12974}
12975
12976void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12977 uint32_t frameNumber, camera3_buffer_status_t err)
12978{
12979 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12980 pthread_mutex_lock(&mMutex);
12981
12982 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12983 if (req.frame_number != frameNumber)
12984 continue;
12985 for (auto& k : req.mPendingBufferList) {
12986 if(k.stream->priv == ch) {
12987 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12988 }
12989 }
12990 }
12991
12992 pthread_mutex_unlock(&mMutex);
12993 return;
12994}
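/* Example (illustrative sketch, not part of the build): how a channel could
 * report a failed buffer through the static callback registered at channel
 * creation. "mBufErrCb" and "mUserData" are hypothetical channel members.
 *
 *   mBufErrCb(this, frameNumber, CAMERA3_BUFFER_STATUS_ERROR, mUserData);
 *   // The HAL then marks the matching entry in mPendingBuffersMap so the
 *   // buffer is returned to the framework with an error status.
 */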
Thierry Strudel3d639192016-09-09 11:52:26 -070012995/*===========================================================================
12996 * FUNCTION : initialize
12997 *
12998 * DESCRIPTION: Pass framework callback pointers to HAL
12999 *
13000 * PARAMETERS :
13001 *
13002 *
13003 * RETURN : Success : 0
13004 * Failure: -ENODEV
13005 *==========================================================================*/
13006
13007int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13008 const camera3_callback_ops_t *callback_ops)
13009{
13010 LOGD("E");
13011 QCamera3HardwareInterface *hw =
13012 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13013 if (!hw) {
13014 LOGE("NULL camera device");
13015 return -ENODEV;
13016 }
13017
13018 int rc = hw->initialize(callback_ops);
13019 LOGD("X");
13020 return rc;
13021}
13022
13023/*===========================================================================
13024 * FUNCTION : configure_streams
13025 *
13026 * DESCRIPTION: Validate and apply the stream configuration requested by the framework
13027 *
13028 * PARAMETERS :
13029 *
13030 *
13031 * RETURN : Success: 0
13032 * Failure: -EINVAL (if stream configuration is invalid)
13033 * -ENODEV (fatal error)
13034 *==========================================================================*/
13035
13036int QCamera3HardwareInterface::configure_streams(
13037 const struct camera3_device *device,
13038 camera3_stream_configuration_t *stream_list)
13039{
13040 LOGD("E");
13041 QCamera3HardwareInterface *hw =
13042 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13043 if (!hw) {
13044 LOGE("NULL camera device");
13045 return -ENODEV;
13046 }
13047 int rc = hw->configureStreams(stream_list);
13048 LOGD("X");
13049 return rc;
13050}
13051
13052/*===========================================================================
13053 * FUNCTION : construct_default_request_settings
13054 *
13055 * DESCRIPTION: Configure a settings buffer to meet the required use case
13056 *
13057 * PARAMETERS :
13058 *
13059 *
13060 * RETURN : Success: Return valid metadata
13061 * Failure: Return NULL
13062 *==========================================================================*/
13063const camera_metadata_t* QCamera3HardwareInterface::
13064 construct_default_request_settings(const struct camera3_device *device,
13065 int type)
13066{
13067
13068 LOGD("E");
13069 camera_metadata_t* fwk_metadata = NULL;
13070 QCamera3HardwareInterface *hw =
13071 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13072 if (!hw) {
13073 LOGE("NULL camera device");
13074 return NULL;
13075 }
13076
13077 fwk_metadata = hw->translateCapabilityToMetadata(type);
13078
13079 LOGD("X");
13080 return fwk_metadata;
13081}
13082
13083/*===========================================================================
13084 * FUNCTION : process_capture_request
13085 *
13086 * DESCRIPTION: Handle a single capture request from the framework (forwarded to orchestrateRequest)
13087 *
13088 * PARAMETERS :
13089 *
13090 *
13091 * RETURN :
13092 *==========================================================================*/
13093int QCamera3HardwareInterface::process_capture_request(
13094 const struct camera3_device *device,
13095 camera3_capture_request_t *request)
13096{
13097 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013098 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013099 QCamera3HardwareInterface *hw =
13100 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13101 if (!hw) {
13102 LOGE("NULL camera device");
13103 return -EINVAL;
13104 }
13105
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013106 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013107 LOGD("X");
13108 return rc;
13109}
13110
13111/*===========================================================================
13112 * FUNCTION : dump
13113 *
13114 * DESCRIPTION: Dump HAL state for this camera device to the given file descriptor
13115 *
13116 * PARAMETERS :
13117 *
13118 *
13119 * RETURN :
13120 *==========================================================================*/
13121
13122void QCamera3HardwareInterface::dump(
13123 const struct camera3_device *device, int fd)
13124{
13125 /* Log level property is read when "adb shell dumpsys media.camera" is
13126 called so that the log level can be controlled without restarting
13127 the media server */
13128 getLogLevel();
13129
13130 LOGD("E");
13131 QCamera3HardwareInterface *hw =
13132 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13133 if (!hw) {
13134 LOGE("NULL camera device");
13135 return;
13136 }
13137
13138 hw->dump(fd);
13139 LOGD("X");
13140 return;
13141}
13142
13143/*===========================================================================
13144 * FUNCTION : flush
13145 *
13146 * DESCRIPTION: Flush all in-flight requests; only processed while the device is in STARTED state
13147 *
13148 * PARAMETERS :
13149 *
13150 *
13151 * RETURN :
13152 *==========================================================================*/
13153
13154int QCamera3HardwareInterface::flush(
13155 const struct camera3_device *device)
13156{
13157 int rc;
13158 LOGD("E");
13159 QCamera3HardwareInterface *hw =
13160 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13161 if (!hw) {
13162 LOGE("NULL camera device");
13163 return -EINVAL;
13164 }
13165
13166 pthread_mutex_lock(&hw->mMutex);
13167 // Validate current state
13168 switch (hw->mState) {
13169 case STARTED:
13170 /* valid state */
13171 break;
13172
13173 case ERROR:
13174 pthread_mutex_unlock(&hw->mMutex);
13175 hw->handleCameraDeviceError();
13176 return -ENODEV;
13177
13178 default:
13179 LOGI("Flush returned during state %d", hw->mState);
13180 pthread_mutex_unlock(&hw->mMutex);
13181 return 0;
13182 }
13183 pthread_mutex_unlock(&hw->mMutex);
13184
13185 rc = hw->flush(true /* restart channels */ );
13186 LOGD("X");
13187 return rc;
13188}
13189
13190/*===========================================================================
13191 * FUNCTION : close_camera_device
13192 *
13193 * DESCRIPTION: Close the camera device and free the HAL instance
13194 *
13195 * PARAMETERS :
13196 *
13197 *
13198 * RETURN :
13199 *==========================================================================*/
13200int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13201{
13202 int ret = NO_ERROR;
13203 QCamera3HardwareInterface *hw =
13204 reinterpret_cast<QCamera3HardwareInterface *>(
13205 reinterpret_cast<camera3_device_t *>(device)->priv);
13206 if (!hw) {
13207 LOGE("NULL camera device");
13208 return BAD_VALUE;
13209 }
13210
13211 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13212 delete hw;
13213 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013214 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013215 return ret;
13216}
13217
13218/*===========================================================================
13219 * FUNCTION : getWaveletDenoiseProcessPlate
13220 *
13221 * DESCRIPTION: query wavelet denoise process plate
13222 *
13223 * PARAMETERS : None
13224 *
13225 * RETURN : WNR process plate value
13226 *==========================================================================*/
13227cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13228{
13229 char prop[PROPERTY_VALUE_MAX];
13230 memset(prop, 0, sizeof(prop));
13231 property_get("persist.denoise.process.plates", prop, "0");
13232 int processPlate = atoi(prop);
13233 switch(processPlate) {
13234 case 0:
13235 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13236 case 1:
13237 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13238 case 2:
13239 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13240 case 3:
13241 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13242 default:
13243 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13244 }
13245}
13246
13247
13248/*===========================================================================
13249 * FUNCTION : getTemporalDenoiseProcessPlate
13250 *
13251 * DESCRIPTION: query temporal denoise process plate
13252 *
13253 * PARAMETERS : None
13254 *
13255 * RETURN : TNR process plate value
13256 *==========================================================================*/
13257cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13258{
13259 char prop[PROPERTY_VALUE_MAX];
13260 memset(prop, 0, sizeof(prop));
13261 property_get("persist.tnr.process.plates", prop, "0");
13262 int processPlate = atoi(prop);
13263 switch(processPlate) {
13264 case 0:
13265 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13266 case 1:
13267 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13268 case 2:
13269 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13270 case 3:
13271 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13272 default:
13273 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13274 }
13275}
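/* Example (illustrative sketch, not part of the build): selecting the TNR
 * process plate from a test harness via the persist property read above.
 * property_set() is from cutils/properties.h; "hw" is a hypothetical
 * QCamera3HardwareInterface instance.
 *
 *   property_set("persist.tnr.process.plates", "2");
 *   cam_denoise_process_type_t plate = hw->getTemporalDenoiseProcessPlate();
 *   // plate == CAM_WAVELET_DENOISE_STREAMLINE_YCBCR for "2"
 */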
13276
13277
13278/*===========================================================================
13279 * FUNCTION : extractSceneMode
13280 *
13281 * DESCRIPTION: Extract scene mode from frameworks set metadata
13282 *
13283 * PARAMETERS :
13284 * @frame_settings: CameraMetadata reference
13285 * @metaMode: ANDROID_CONTORL_MODE
13286 * @hal_metadata: hal metadata structure
13287 *
13288 * RETURN : None
13289 *==========================================================================*/
13290int32_t QCamera3HardwareInterface::extractSceneMode(
13291 const CameraMetadata &frame_settings, uint8_t metaMode,
13292 metadata_buffer_t *hal_metadata)
13293{
13294 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013295 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13296
13297 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13298 LOGD("Ignoring control mode OFF_KEEP_STATE");
13299 return NO_ERROR;
13300 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013301
13302 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13303 camera_metadata_ro_entry entry =
13304 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13305 if (0 == entry.count)
13306 return rc;
13307
13308 uint8_t fwk_sceneMode = entry.data.u8[0];
13309
13310 int val = lookupHalName(SCENE_MODES_MAP,
13311 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13312 fwk_sceneMode);
13313 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013314 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013315 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013316 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013317 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013318
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013319 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13320 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13321 }
13322
13323 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13324 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013325 cam_hdr_param_t hdr_params;
13326 hdr_params.hdr_enable = 1;
13327 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13328 hdr_params.hdr_need_1x = false;
13329 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13330 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13331 rc = BAD_VALUE;
13332 }
13333 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013334
Thierry Strudel3d639192016-09-09 11:52:26 -070013335 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13336 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13337 rc = BAD_VALUE;
13338 }
13339 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013340
13341 if (mForceHdrSnapshot) {
13342 cam_hdr_param_t hdr_params;
13343 hdr_params.hdr_enable = 1;
13344 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13345 hdr_params.hdr_need_1x = false;
13346 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13347 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13348 rc = BAD_VALUE;
13349 }
13350 }
13351
Thierry Strudel3d639192016-09-09 11:52:26 -070013352 return rc;
13353}
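/* Example (illustrative sketch, not part of the build): request settings that
 * exercise the scene-mode path above. "settings" is a hypothetical
 * CameraMetadata object used to build the capture request.
 *
 *   uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *   // extractSceneMode() maps the scene mode to a bestshot mode and, where
 *   // supported, enables sensor HDR or HAL HDR bracketing.
 */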
13354
13355/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013356 * FUNCTION : setVideoHdrMode
13357 *
13358 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13359 *
13360 * PARAMETERS :
13361 * @hal_metadata: hal metadata structure
13362 * @vhdr: requested video HDR mode (cam_video_hdr_mode_t)
13363 *
13364 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13365 *==========================================================================*/
13366int32_t QCamera3HardwareInterface::setVideoHdrMode(
13367 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13368{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013369 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13370 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13371 }
13372
13373 LOGE("Invalid Video HDR mode %d!", vhdr);
13374 return BAD_VALUE;
13375}
13376
13377/*===========================================================================
13378 * FUNCTION : setSensorHDR
13379 *
13380 * DESCRIPTION: Enable/disable sensor HDR.
13381 *
13382 * PARAMETERS :
13383 * @hal_metadata: hal metadata structure
13384 * @enable: boolean whether to enable/disable sensor HDR
13385 *
13386 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13387 *==========================================================================*/
13388int32_t QCamera3HardwareInterface::setSensorHDR(
13389 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13390{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013391 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013392 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13393
13394 if (enable) {
13395 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13396 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13397 #ifdef _LE_CAMERA_
13398 //Default to staggered HDR for IOT
13399 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13400 #else
13401 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13402 #endif
13403 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13404 }
13405
13406 bool isSupported = false;
13407 switch (sensor_hdr) {
13408 case CAM_SENSOR_HDR_IN_SENSOR:
13409 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13410 CAM_QCOM_FEATURE_SENSOR_HDR) {
13411 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013412 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013413 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013414 break;
13415 case CAM_SENSOR_HDR_ZIGZAG:
13416 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13417 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13418 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013419 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013420 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013421 break;
13422 case CAM_SENSOR_HDR_STAGGERED:
13423 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13424 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13425 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013426 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013427 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013428 break;
13429 case CAM_SENSOR_HDR_OFF:
13430 isSupported = true;
13431 LOGD("Turning off sensor HDR");
13432 break;
13433 default:
13434 LOGE("HDR mode %d not supported", sensor_hdr);
13435 rc = BAD_VALUE;
13436 break;
13437 }
13438
13439 if(isSupported) {
13440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13441 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13442 rc = BAD_VALUE;
13443 } else {
13444 if(!isVideoHdrEnable)
13445 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013446 }
13447 }
13448 return rc;
13449}
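/* Example (illustrative sketch, not part of the build): forcing staggered
 * sensor HDR from a test harness, assuming the property is read on the next
 * request that enables HDR.
 *
 *   property_set("persist.camera.sensor.hdr", "3");  // 3 == CAM_SENSOR_HDR_STAGGERED
 *   setSensorHDR(hal_metadata, true, false);         // enable, not the video HDR path
 */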
13450
13451/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013452 * FUNCTION : needRotationReprocess
13453 *
13454 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13455 *
13456 * PARAMETERS : none
13457 *
13458 * RETURN : true: needed
13459 * false: no need
13460 *==========================================================================*/
13461bool QCamera3HardwareInterface::needRotationReprocess()
13462{
13463 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13464 // current rotation is not zero, and pp has the capability to process rotation
13465 LOGH("need do reprocess for rotation");
13466 return true;
13467 }
13468
13469 return false;
13470}
13471
13472/*===========================================================================
13473 * FUNCTION : needReprocess
13474 *
13475 * DESCRIPTION: if reprocess is needed
13476 *
13477 * PARAMETERS : none
13478 *
13479 * RETURN : true: needed
13480 * false: no need
13481 *==========================================================================*/
13482bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13483{
13484 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13485 // TODO: add for ZSL HDR later
13486 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13487 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13488 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13489 return true;
13490 } else {
13491 LOGH("already post processed frame");
13492 return false;
13493 }
13494 }
13495 return needRotationReprocess();
13496}
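/* Example (illustrative sketch, not part of the build): a hypothetical caller
 * deciding whether a snapshot frame must go through offline reprocessing.
 *
 *   cam_feature_mask_t ppMask = CAM_QCOM_FEATURE_NONE;  // frame not post-processed yet
 *   if (needReprocess(ppMask)) {
 *       // route the frame through an offline reprocess channel
 *   }
 *   // needJpegExifRotation() below is the complementary check for rotation
 *   // handled in the JPEG encoder instead of the CPP.
 */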
13497
13498/*===========================================================================
13499 * FUNCTION : needJpegExifRotation
13500 *
13501 * DESCRIPTION: if rotation from jpeg is needed
13502 *
13503 * PARAMETERS : none
13504 *
13505 * RETURN : true: needed
13506 * false: no need
13507 *==========================================================================*/
13508bool QCamera3HardwareInterface::needJpegExifRotation()
13509{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013510 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013511 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13512 LOGD("Need use Jpeg EXIF Rotation");
13513 return true;
13514 }
13515 return false;
13516}
13517
13518/*===========================================================================
13519 * FUNCTION : addOfflineReprocChannel
13520 *
13521 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13522 * coming from input channel
13523 *
13524 * PARAMETERS :
13525 * @config : reprocess configuration
13526 * @inputChHandle : pointer to the input (source) channel
13527 *
13528 *
13529 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13530 *==========================================================================*/
13531QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13532 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13533{
13534 int32_t rc = NO_ERROR;
13535 QCamera3ReprocessChannel *pChannel = NULL;
13536
13537 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013538 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13539 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013540 if (NULL == pChannel) {
13541 LOGE("no mem for reprocess channel");
13542 return NULL;
13543 }
13544
13545 rc = pChannel->initialize(IS_TYPE_NONE);
13546 if (rc != NO_ERROR) {
13547 LOGE("init reprocess channel failed, ret = %d", rc);
13548 delete pChannel;
13549 return NULL;
13550 }
13551
13552 // pp feature config
13553 cam_pp_feature_config_t pp_config;
13554 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13555
13556 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13557 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13558 & CAM_QCOM_FEATURE_DSDN) {
13559 // Use CPP CDS in case h/w supports it.
13560 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13561 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13562 }
13563 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13564 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13565 }
13566
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013567 if (config.hdr_param.hdr_enable) {
13568 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13569 pp_config.hdr_param = config.hdr_param;
13570 }
13571
13572 if (mForceHdrSnapshot) {
13573 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13574 pp_config.hdr_param.hdr_enable = 1;
13575 pp_config.hdr_param.hdr_need_1x = 0;
13576 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13577 }
13578
Thierry Strudel3d639192016-09-09 11:52:26 -070013579 rc = pChannel->addReprocStreamsFromSource(pp_config,
13580 config,
13581 IS_TYPE_NONE,
13582 mMetadataChannel);
13583
13584 if (rc != NO_ERROR) {
13585 delete pChannel;
13586 return NULL;
13587 }
13588 return pChannel;
13589}
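/* Example (illustrative sketch, not part of the build): a hypothetical caller
 * creating a reprocess channel for an HDR snapshot. Only fields referenced in
 * this file are shown; the rest of reprocess_config_t is left zero-initialized.
 *
 *   reprocess_config_t cfg;
 *   memset(&cfg, 0, sizeof(cfg));
 *   cfg.hdr_param.hdr_enable = 1;                   // request HDR bracketing
 *   QCamera3ReprocessChannel *rch = addOfflineReprocChannel(cfg, picChannel);
 *   if (rch == NULL) {
 *       // fall back to the non-reprocess path
 *   }
 */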
13590
13591/*===========================================================================
13592 * FUNCTION : getMobicatMask
13593 *
13594 * DESCRIPTION: returns mobicat mask
13595 *
13596 * PARAMETERS : none
13597 *
13598 * RETURN : mobicat mask
13599 *
13600 *==========================================================================*/
13601uint8_t QCamera3HardwareInterface::getMobicatMask()
13602{
13603 return m_MobicatMask;
13604}
13605
13606/*===========================================================================
13607 * FUNCTION : setMobicat
13608 *
13609 * DESCRIPTION: set Mobicat on/off.
13610 *
13611 * PARAMETERS :
13612 * @params : none
13613 *
13614 * RETURN : int32_t type of status
13615 * NO_ERROR -- success
13616 * non-zero failure code
13617 *==========================================================================*/
13618int32_t QCamera3HardwareInterface::setMobicat()
13619{
13620 char value [PROPERTY_VALUE_MAX];
13621 property_get("persist.camera.mobicat", value, "0");
13622 int32_t ret = NO_ERROR;
13623 uint8_t enableMobi = (uint8_t)atoi(value);
13624
13625 if (enableMobi) {
13626 tune_cmd_t tune_cmd;
13627 tune_cmd.type = SET_RELOAD_CHROMATIX;
13628 tune_cmd.module = MODULE_ALL;
13629 tune_cmd.value = TRUE;
13630 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13631 CAM_INTF_PARM_SET_VFE_COMMAND,
13632 tune_cmd);
13633
13634 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13635 CAM_INTF_PARM_SET_PP_COMMAND,
13636 tune_cmd);
13637 }
13638 m_MobicatMask = enableMobi;
13639
13640 return ret;
13641}
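/* Example (illustrative sketch, not part of the build): enabling Mobicat
 * tuning metadata from a test harness. "hw" is a hypothetical HAL instance;
 * the property is read the next time setMobicat() runs.
 *
 *   property_set("persist.camera.mobicat", "1");
 *   hw->setMobicat();   // m_MobicatMask becomes 1; VFE and PP reload chromatix
 */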
13642
13643/*===========================================================================
13644* FUNCTION : getLogLevel
13645*
13646* DESCRIPTION: Reads the log level property into a variable
13647*
13648* PARAMETERS :
13649* None
13650*
13651* RETURN :
13652* None
13653*==========================================================================*/
13654void QCamera3HardwareInterface::getLogLevel()
13655{
13656 char prop[PROPERTY_VALUE_MAX];
13657 uint32_t globalLogLevel = 0;
13658
13659 property_get("persist.camera.hal.debug", prop, "0");
13660 int val = atoi(prop);
13661 if (0 <= val) {
13662 gCamHal3LogLevel = (uint32_t)val;
13663 }
13664
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013665 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013666 gKpiDebugLevel = atoi(prop);
13667
13668 property_get("persist.camera.global.debug", prop, "0");
13669 val = atoi(prop);
13670 if (0 <= val) {
13671 globalLogLevel = (uint32_t)val;
13672 }
13673
13674 /* Highest log level among hal.logs and global.logs is selected */
13675 if (gCamHal3LogLevel < globalLogLevel)
13676 gCamHal3LogLevel = globalLogLevel;
13677
13678 return;
13679}
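/* Example (illustrative sketch, not part of the build): raising HAL verbosity
 * at runtime. The properties are re-read on the next dump() call, which
 * invokes getLogLevel().
 *
 *   property_set("persist.camera.hal.debug", "4");
 *   property_set("persist.camera.global.debug", "2");
 *   // gCamHal3LogLevel ends up as the higher of the two values, i.e. 4.
 */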
13680
13681/*===========================================================================
13682 * FUNCTION : validateStreamRotations
13683 *
13684 * DESCRIPTION: Check if the rotations requested are supported
13685 *
13686 * PARAMETERS :
13687 * @stream_list : streams to be configured
13688 *
13689 * RETURN : NO_ERROR on success
13690 * -EINVAL on failure
13691 *
13692 *==========================================================================*/
13693int QCamera3HardwareInterface::validateStreamRotations(
13694 camera3_stream_configuration_t *streamList)
13695{
13696 int rc = NO_ERROR;
13697
13698 /*
13699 * Loop through all streams requested in configuration
13700 * Check if unsupported rotations have been requested on any of them
13701 */
13702 for (size_t j = 0; j < streamList->num_streams; j++){
13703 camera3_stream_t *newStream = streamList->streams[j];
13704
Emilian Peev35ceeed2017-06-29 11:58:56 -070013705 switch(newStream->rotation) {
13706 case CAMERA3_STREAM_ROTATION_0:
13707 case CAMERA3_STREAM_ROTATION_90:
13708 case CAMERA3_STREAM_ROTATION_180:
13709 case CAMERA3_STREAM_ROTATION_270:
13710 //Expected values
13711 break;
13712 default:
13713 ALOGE("%s: Error: Unsupported rotation of %d requested for stream "
13714 "type:%d and stream format:%d", __func__,
13715 newStream->rotation, newStream->stream_type,
13716 newStream->format);
13717 return -EINVAL;
13718 }
13719
Thierry Strudel3d639192016-09-09 11:52:26 -070013720 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13721 bool isImplDef = (newStream->format ==
13722 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13723 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13724 isImplDef);
13725
13726 if (isRotated && (!isImplDef || isZsl)) {
13727 LOGE("Error: Unsupported rotation of %d requested for stream "
13728 "type:%d and stream format:%d",
13729 newStream->rotation, newStream->stream_type,
13730 newStream->format);
13731 rc = -EINVAL;
13732 break;
13733 }
13734 }
13735
13736 return rc;
13737}
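/* Example (illustrative sketch, not part of the build): a stream that the
 * check above rejects, because rotation is requested on a ZSL stream.
 *
 *   camera3_stream_t s = {};
 *   s.stream_type = CAMERA3_STREAM_BIDIRECTIONAL;
 *   s.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   s.rotation    = CAMERA3_STREAM_ROTATION_90;
 *   // isRotated && isZsl -> validateStreamRotations() returns -EINVAL.
 */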
13738
13739/*===========================================================================
13740* FUNCTION : getFlashInfo
13741*
13742* DESCRIPTION: Retrieve information about whether the device has a flash.
13743*
13744* PARAMETERS :
13745* @cameraId : Camera id to query
13746* @hasFlash : Boolean indicating whether there is a flash device
13747* associated with given camera
13748* @flashNode : If a flash device exists, this will be its device node.
13749*
13750* RETURN :
13751* None
13752*==========================================================================*/
13753void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13754 bool& hasFlash,
13755 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13756{
13757 cam_capability_t* camCapability = gCamCapability[cameraId];
13758 if (NULL == camCapability) {
13759 hasFlash = false;
13760 flashNode[0] = '\0';
13761 } else {
13762 hasFlash = camCapability->flash_available;
13763 strlcpy(flashNode,
13764 (char*)camCapability->flash_dev_name,
13765 QCAMERA_MAX_FILEPATH_LENGTH);
13766 }
13767}
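/* Example (illustrative sketch, not part of the build): a hypothetical torch
 * helper querying flash availability before opening the device node.
 *
 *   bool hasFlash = false;
 *   char node[QCAMERA_MAX_FILEPATH_LENGTH] = {0};
 *   getFlashInfo(cameraId, hasFlash, node);
 *   if (hasFlash) {
 *       // "node" holds the flash device path reported by the capability data
 *   }
 */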
13768
13769/*===========================================================================
13770* FUNCTION : getEepromVersionInfo
13771*
13772* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13773*
13774* PARAMETERS : None
13775*
13776* RETURN : string describing EEPROM version
13777* "\0" if no such info available
13778*==========================================================================*/
13779const char *QCamera3HardwareInterface::getEepromVersionInfo()
13780{
13781 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13782}
13783
13784/*===========================================================================
13785* FUNCTION : getLdafCalib
13786*
13787* DESCRIPTION: Retrieve Laser AF calibration data
13788*
13789* PARAMETERS : None
13790*
13791* RETURN : Two uint32_t describing laser AF calibration data
13792* NULL if none is available.
13793*==========================================================================*/
13794const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13795{
13796 if (mLdafCalibExist) {
13797 return &mLdafCalib[0];
13798 } else {
13799 return NULL;
13800 }
13801}
13802
13803/*===========================================================================
13804 * FUNCTION : dynamicUpdateMetaStreamInfo
13805 *
13806 * DESCRIPTION: This function:
13807 * (1) stops all the channels
13808 * (2) returns error on pending requests and buffers
13809 * (3) sends metastream_info in setparams
13810 * (4) starts all channels
13811 * This is useful when sensor has to be restarted to apply any
13812 * settings such as frame rate from a different sensor mode
13813 *
13814 * PARAMETERS : None
13815 *
13816 * RETURN : NO_ERROR on success
13817 * Error codes on failure
13818 *
13819 *==========================================================================*/
13820int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13821{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013822 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013823 int rc = NO_ERROR;
13824
13825 LOGD("E");
13826
13827 rc = stopAllChannels();
13828 if (rc < 0) {
13829 LOGE("stopAllChannels failed");
13830 return rc;
13831 }
13832
13833 rc = notifyErrorForPendingRequests();
13834 if (rc < 0) {
13835 LOGE("notifyErrorForPendingRequests failed");
13836 return rc;
13837 }
13838
13839 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13840 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13841 "Format:%d",
13842 mStreamConfigInfo.type[i],
13843 mStreamConfigInfo.stream_sizes[i].width,
13844 mStreamConfigInfo.stream_sizes[i].height,
13845 mStreamConfigInfo.postprocess_mask[i],
13846 mStreamConfigInfo.format[i]);
13847 }
13848
13849 /* Send meta stream info once again so that ISP can start */
13850 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13851 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13852 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13853 mParameters);
13854 if (rc < 0) {
13855 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13856 }
13857
13858 rc = startAllChannels();
13859 if (rc < 0) {
13860 LOGE("startAllChannels failed");
13861 return rc;
13862 }
13863
13864 LOGD("X");
13865 return rc;
13866}
13867
13868/*===========================================================================
13869 * FUNCTION : stopAllChannels
13870 *
13871 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13872 *
13873 * PARAMETERS : None
13874 *
13875 * RETURN : NO_ERROR on success
13876 * Error codes on failure
13877 *
13878 *==========================================================================*/
13879int32_t QCamera3HardwareInterface::stopAllChannels()
13880{
13881 int32_t rc = NO_ERROR;
13882
13883 LOGD("Stopping all channels");
13884 // Stop the Streams/Channels
13885 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13886 it != mStreamInfo.end(); it++) {
13887 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13888 if (channel) {
13889 channel->stop();
13890 }
13891 (*it)->status = INVALID;
13892 }
13893
13894 if (mSupportChannel) {
13895 mSupportChannel->stop();
13896 }
13897 if (mAnalysisChannel) {
13898 mAnalysisChannel->stop();
13899 }
13900 if (mRawDumpChannel) {
13901 mRawDumpChannel->stop();
13902 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013903 if (mHdrPlusRawSrcChannel) {
13904 mHdrPlusRawSrcChannel->stop();
13905 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013906 if (mMetadataChannel) {
13907 /* If content of mStreamInfo is not 0, there is metadata stream */
13908 mMetadataChannel->stop();
13909 }
13910
13911 LOGD("All channels stopped");
13912 return rc;
13913}
13914
13915/*===========================================================================
13916 * FUNCTION : startAllChannels
13917 *
13918 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13919 *
13920 * PARAMETERS : None
13921 *
13922 * RETURN : NO_ERROR on success
13923 * Error codes on failure
13924 *
13925 *==========================================================================*/
13926int32_t QCamera3HardwareInterface::startAllChannels()
13927{
13928 int32_t rc = NO_ERROR;
13929
13930 LOGD("Start all channels ");
13931 // Start the Streams/Channels
13932 if (mMetadataChannel) {
13933 /* If content of mStreamInfo is not 0, there is metadata stream */
13934 rc = mMetadataChannel->start();
13935 if (rc < 0) {
13936 LOGE("META channel start failed");
13937 return rc;
13938 }
13939 }
13940 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13941 it != mStreamInfo.end(); it++) {
13942 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13943 if (channel) {
13944 rc = channel->start();
13945 if (rc < 0) {
13946 LOGE("channel start failed");
13947 return rc;
13948 }
13949 }
13950 }
13951 if (mAnalysisChannel) {
13952 mAnalysisChannel->start();
13953 }
13954 if (mSupportChannel) {
13955 rc = mSupportChannel->start();
13956 if (rc < 0) {
13957 LOGE("Support channel start failed");
13958 return rc;
13959 }
13960 }
13961 if (mRawDumpChannel) {
13962 rc = mRawDumpChannel->start();
13963 if (rc < 0) {
13964 LOGE("RAW dump channel start failed");
13965 return rc;
13966 }
13967 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013968 if (mHdrPlusRawSrcChannel) {
13969 rc = mHdrPlusRawSrcChannel->start();
13970 if (rc < 0) {
13971 LOGE("HDR+ RAW channel start failed");
13972 return rc;
13973 }
13974 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013975
13976 LOGD("All channels started");
13977 return rc;
13978}
13979
13980/*===========================================================================
13981 * FUNCTION : notifyErrorForPendingRequests
13982 *
13983 * DESCRIPTION: This function sends error for all the pending requests/buffers
13984 *
13985 * PARAMETERS : None
13986 *
13987 * RETURN : Error codes
13988 * NO_ERROR on success
13989 *
13990 *==========================================================================*/
13991int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13992{
Emilian Peev7650c122017-01-19 08:24:33 -080013993 notifyErrorFoPendingDepthData(mDepthChannel);
13994
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013995 auto pendingRequest = mPendingRequestsList.begin();
13996 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013997
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013998 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13999 // buffers (for which buffers aren't sent yet).
14000 while (pendingRequest != mPendingRequestsList.end() ||
14001 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14002 if (pendingRequest == mPendingRequestsList.end() ||
14003 pendingBuffer->frame_number < pendingRequest->frame_number) {
14004 // If metadata for this frame was sent, notify about a buffer error and return buffers
14005 // with error.
14006 for (auto &info : pendingBuffer->mPendingBufferList) {
14007 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014008 camera3_notify_msg_t notify_msg;
14009 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14010 notify_msg.type = CAMERA3_MSG_ERROR;
14011 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014012 notify_msg.message.error.error_stream = info.stream;
14013 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014014 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014015
14016 camera3_stream_buffer_t buffer = {};
14017 buffer.acquire_fence = -1;
14018 buffer.release_fence = -1;
14019 buffer.buffer = info.buffer;
14020 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14021 buffer.stream = info.stream;
14022 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014023 }
14024
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014025 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14026 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14027 pendingBuffer->frame_number > pendingRequest->frame_number) {
14028 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014029 camera3_notify_msg_t notify_msg;
14030 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14031 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014032 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14033 notify_msg.message.error.error_stream = nullptr;
14034 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014035 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014036
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014037 if (pendingRequest->input_buffer != nullptr) {
14038 camera3_capture_result result = {};
14039 result.frame_number = pendingRequest->frame_number;
14040 result.result = nullptr;
14041 result.input_buffer = pendingRequest->input_buffer;
14042 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014043 }
14044
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014045 mShutterDispatcher.clear(pendingRequest->frame_number);
14046 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14047 } else {
14048 // If both buffers and result metadata weren't sent yet, notify about a request error
14049 // and return buffers with error.
14050 for (auto &info : pendingBuffer->mPendingBufferList) {
14051 camera3_notify_msg_t notify_msg;
14052 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14053 notify_msg.type = CAMERA3_MSG_ERROR;
14054 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14055 notify_msg.message.error.error_stream = info.stream;
14056 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14057 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014058
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014059 camera3_stream_buffer_t buffer = {};
14060 buffer.acquire_fence = -1;
14061 buffer.release_fence = -1;
14062 buffer.buffer = info.buffer;
14063 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14064 buffer.stream = info.stream;
14065 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14066 }
14067
14068 if (pendingRequest->input_buffer != nullptr) {
14069 camera3_capture_result result = {};
14070 result.frame_number = pendingRequest->frame_number;
14071 result.result = nullptr;
14072 result.input_buffer = pendingRequest->input_buffer;
14073 orchestrateResult(&result);
14074 }
14075
14076 mShutterDispatcher.clear(pendingRequest->frame_number);
14077 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14078 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014079 }
14080 }
14081
14082 /* Reset pending frame Drop list and requests list */
14083 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014084 mShutterDispatcher.clear();
14085 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014086 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014087 LOGH("Cleared all the pending buffers ");
14088
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014089 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014090}
14091
14092bool QCamera3HardwareInterface::isOnEncoder(
14093 const cam_dimension_t max_viewfinder_size,
14094 uint32_t width, uint32_t height)
14095{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014096 return ((width > (uint32_t)max_viewfinder_size.width) ||
14097 (height > (uint32_t)max_viewfinder_size.height) ||
14098 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14099 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014100}
14101
14102/*===========================================================================
14103 * FUNCTION : setBundleInfo
14104 *
14105 * DESCRIPTION: Set bundle info for all streams that are bundle.
14106 *
14107 * PARAMETERS : None
14108 *
14109 * RETURN : NO_ERROR on success
14110 * Error codes on failure
14111 *==========================================================================*/
14112int32_t QCamera3HardwareInterface::setBundleInfo()
14113{
14114 int32_t rc = NO_ERROR;
14115
14116 if (mChannelHandle) {
14117 cam_bundle_config_t bundleInfo;
14118 memset(&bundleInfo, 0, sizeof(bundleInfo));
14119 rc = mCameraHandle->ops->get_bundle_info(
14120 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14121 if (rc != NO_ERROR) {
14122 LOGE("get_bundle_info failed");
14123 return rc;
14124 }
14125 if (mAnalysisChannel) {
14126 mAnalysisChannel->setBundleInfo(bundleInfo);
14127 }
14128 if (mSupportChannel) {
14129 mSupportChannel->setBundleInfo(bundleInfo);
14130 }
14131 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14132 it != mStreamInfo.end(); it++) {
14133 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14134 channel->setBundleInfo(bundleInfo);
14135 }
14136 if (mRawDumpChannel) {
14137 mRawDumpChannel->setBundleInfo(bundleInfo);
14138 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014139 if (mHdrPlusRawSrcChannel) {
14140 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14141 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014142 }
14143
14144 return rc;
14145}
14146
14147/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014148 * FUNCTION : setInstantAEC
14149 *
14150 * DESCRIPTION: Set Instant AEC related params.
14151 *
14152 * PARAMETERS :
14153 * @meta: CameraMetadata reference
14154 *
14155 * RETURN : NO_ERROR on success
14156 * Error codes on failure
14157 *==========================================================================*/
14158int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14159{
14160 int32_t rc = NO_ERROR;
14161 uint8_t val = 0;
14162 char prop[PROPERTY_VALUE_MAX];
14163
14164 // First try to configure instant AEC from framework metadata
14165 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14166 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14167 }
14168
14169 // If framework did not set this value, try to read from set prop.
14170 if (val == 0) {
14171 memset(prop, 0, sizeof(prop));
14172 property_get("persist.camera.instant.aec", prop, "0");
14173 val = (uint8_t)atoi(prop);
14174 }
14175
14176 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14177 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14178 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14179 mInstantAEC = val;
14180 mInstantAECSettledFrameNumber = 0;
14181 mInstantAecFrameIdxCount = 0;
14182 LOGH("instantAEC value set %d",val);
14183 if (mInstantAEC) {
14184 memset(prop, 0, sizeof(prop));
14185 property_get("persist.camera.ae.instant.bound", prop, "10");
14186 int32_t aec_frame_skip_cnt = atoi(prop);
14187 if (aec_frame_skip_cnt >= 0) {
14188 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14189 } else {
14190 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14191 rc = BAD_VALUE;
14192 }
14193 }
14194 } else {
14195 LOGE("Bad instant aec value set %d", val);
14196 rc = BAD_VALUE;
14197 }
14198 return rc;
14199}
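/* Example (illustrative sketch, not part of the build): requesting instant AEC
 * from the framework. "meta" is a hypothetical CameraMetadata settings object,
 * and the value 1 is assumed to map to a valid cam_aec_convergence_type entry.
 *
 *   int32_t aec = 1;
 *   meta.update(QCAMERA3_INSTANT_AEC_MODE, &aec, 1);
 *   // setInstantAEC() forwards it as CAM_INTF_PARM_INSTANT_AEC and records how
 *   // many preview frames may be skipped (persist.camera.ae.instant.bound)
 *   // while AEC settles.
 */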
14200
14201/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014202 * FUNCTION : get_num_overall_buffers
14203 *
14204 * DESCRIPTION: Estimate number of pending buffers across all requests.
14205 *
14206 * PARAMETERS : None
14207 *
14208 * RETURN : Number of overall pending buffers
14209 *
14210 *==========================================================================*/
14211uint32_t PendingBuffersMap::get_num_overall_buffers()
14212{
14213 uint32_t sum_buffers = 0;
14214 for (auto &req : mPendingBuffersInRequest) {
14215 sum_buffers += req.mPendingBufferList.size();
14216 }
14217 return sum_buffers;
14218}
14219
14220/*===========================================================================
14221 * FUNCTION : removeBuf
14222 *
14223 * DESCRIPTION: Remove a matching buffer from tracker.
14224 *
14225 * PARAMETERS : @buffer: image buffer for the callback
14226 *
14227 * RETURN : None
14228 *
14229 *==========================================================================*/
14230void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14231{
14232 bool buffer_found = false;
14233 for (auto req = mPendingBuffersInRequest.begin();
14234 req != mPendingBuffersInRequest.end(); req++) {
14235 for (auto k = req->mPendingBufferList.begin();
14236 k != req->mPendingBufferList.end(); k++ ) {
14237 if (k->buffer == buffer) {
14238 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14239 req->frame_number, buffer);
14240 k = req->mPendingBufferList.erase(k);
14241 if (req->mPendingBufferList.empty()) {
14242 // Remove this request from Map
14243 req = mPendingBuffersInRequest.erase(req);
14244 }
14245 buffer_found = true;
14246 break;
14247 }
14248 }
14249 if (buffer_found) {
14250 break;
14251 }
14252 }
14253 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14254 get_num_overall_buffers());
14255}
14256
14257/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014258 * FUNCTION : getBufErrStatus
14259 *
14260 * DESCRIPTION: get buffer error status
14261 *
14262 * PARAMETERS : @buffer: buffer handle
14263 *
14264 * RETURN : Error status
14265 *
14266 *==========================================================================*/
14267int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14268{
14269 for (auto& req : mPendingBuffersInRequest) {
14270 for (auto& k : req.mPendingBufferList) {
14271 if (k.buffer == buffer)
14272 return k.bufStatus;
14273 }
14274 }
14275 return CAMERA3_BUFFER_STATUS_OK;
14276}
14277
14278/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014279 * FUNCTION : setPAAFSupport
14280 *
14281 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14282 * feature mask according to stream type and filter
14283 * arrangement
14284 *
14285 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14286 * @stream_type: stream type
14287 * @filter_arrangement: filter arrangement
14288 *
14289 * RETURN : None
14290 *==========================================================================*/
14291void QCamera3HardwareInterface::setPAAFSupport(
14292 cam_feature_mask_t& feature_mask,
14293 cam_stream_type_t stream_type,
14294 cam_color_filter_arrangement_t filter_arrangement)
14295{
Thierry Strudel3d639192016-09-09 11:52:26 -070014296 switch (filter_arrangement) {
14297 case CAM_FILTER_ARRANGEMENT_RGGB:
14298 case CAM_FILTER_ARRANGEMENT_GRBG:
14299 case CAM_FILTER_ARRANGEMENT_GBRG:
14300 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014301 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14302 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014303 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014304 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14305 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014306 }
14307 break;
14308 case CAM_FILTER_ARRANGEMENT_Y:
14309 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14310 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14311 }
14312 break;
14313 default:
14314 break;
14315 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014316 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14317 feature_mask, stream_type, filter_arrangement);
14318
14319
Thierry Strudel3d639192016-09-09 11:52:26 -070014320}
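/* Example (illustrative sketch, not part of the build): PAAF is added for a
 * Bayer preview stream unless the QTI EIS-core feature is already requested.
 *
 *   cam_feature_mask_t mask = 0;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
 *   // mask now contains CAM_QCOM_FEATURE_PAAF.
 */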
14321
14322/*===========================================================================
14323* FUNCTION : getSensorMountAngle
14324*
14325* DESCRIPTION: Retrieve sensor mount angle
14326*
14327* PARAMETERS : None
14328*
14329* RETURN : sensor mount angle in uint32_t
14330*==========================================================================*/
14331uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14332{
14333 return gCamCapability[mCameraId]->sensor_mount_angle;
14334}
14335
14336/*===========================================================================
14337* FUNCTION : getRelatedCalibrationData
14338*
14339* DESCRIPTION: Retrieve related system calibration data
14340*
14341* PARAMETERS : None
14342*
14343* RETURN : Pointer of related system calibration data
14344*==========================================================================*/
14345const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14346{
14347 return (const cam_related_system_calibration_data_t *)
14348 &(gCamCapability[mCameraId]->related_cam_calibration);
14349}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014350
14351/*===========================================================================
14352 * FUNCTION : is60HzZone
14353 *
14354 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14355 *
14356 * PARAMETERS : None
14357 *
14358 * RETURN : True if in 60Hz zone, False otherwise
14359 *==========================================================================*/
14360bool QCamera3HardwareInterface::is60HzZone()
14361{
14362 time_t t = time(NULL);
14363 struct tm lt;
14364
14365 struct tm* r = localtime_r(&t, &lt);
14366
14367 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14368 return true;
14369 else
14370 return false;
14371}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014372
14373/*===========================================================================
14374 * FUNCTION : adjustBlackLevelForCFA
14375 *
14376 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14377 * of bayer CFA (Color Filter Array).
14378 *
14379 * PARAMETERS : @input: black level pattern in the order of RGGB
14380 * @output: black level pattern in the order of CFA
14381 * @color_arrangement: CFA color arrangement
14382 *
14383 * RETURN : None
14384 *==========================================================================*/
14385template<typename T>
14386void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14387 T input[BLACK_LEVEL_PATTERN_CNT],
14388 T output[BLACK_LEVEL_PATTERN_CNT],
14389 cam_color_filter_arrangement_t color_arrangement)
14390{
14391 switch (color_arrangement) {
14392 case CAM_FILTER_ARRANGEMENT_GRBG:
14393 output[0] = input[1];
14394 output[1] = input[0];
14395 output[2] = input[3];
14396 output[3] = input[2];
14397 break;
14398 case CAM_FILTER_ARRANGEMENT_GBRG:
14399 output[0] = input[2];
14400 output[1] = input[3];
14401 output[2] = input[0];
14402 output[3] = input[1];
14403 break;
14404 case CAM_FILTER_ARRANGEMENT_BGGR:
14405 output[0] = input[3];
14406 output[1] = input[2];
14407 output[2] = input[1];
14408 output[3] = input[0];
14409 break;
14410 case CAM_FILTER_ARRANGEMENT_RGGB:
14411 output[0] = input[0];
14412 output[1] = input[1];
14413 output[2] = input[2];
14414 output[3] = input[3];
14415 break;
14416 default:
14417 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14418 break;
14419 }
14420}
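/* Example (illustrative sketch, not part of the build): remapping an RGGB
 * black-level pattern for a BGGR sensor.
 *
 *   float in[BLACK_LEVEL_PATTERN_CNT]  = {60.f, 61.f, 62.f, 63.f};  // R, Gr, Gb, B
 *   float out[BLACK_LEVEL_PATTERN_CNT] = {};
 *   adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_BGGR);
 *   // out == {63.f, 62.f, 61.f, 60.f}, i.e. the pattern in CFA order.
 */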
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014421
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014422void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14423 CameraMetadata &resultMetadata,
14424 std::shared_ptr<metadata_buffer_t> settings)
14425{
14426 if (settings == nullptr) {
14427 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14428 return;
14429 }
14430
14431 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14432 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14433 }
14434
14435 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14436 String8 str((const char *)gps_methods);
14437 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14438 }
14439
14440 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14441 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14442 }
14443
14444 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14445 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14446 }
14447
14448 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14449 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14450 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14451 }
14452
14453 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14454 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14455 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14456 }
14457
14458 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14459 int32_t fwk_thumb_size[2];
14460 fwk_thumb_size[0] = thumb_size->width;
14461 fwk_thumb_size[1] = thumb_size->height;
14462 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14463 }
14464
14465 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14466 uint8_t fwk_intent = intent[0];
14467 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14468 }
14469}
14470
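/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Try to submit a capture request to the HDR+ service. The
 * request qualifies for HDR+ only if noise reduction and edge modes are
 * HIGH_QUALITY and the single output buffer belongs to a JPEG (BLOB) stream.
 * On success, a YUV buffer borrowed from the pic channel is registered as the
 * output of the HDR+ capture request and recorded in hdrPlusRequest.
 *
 * PARAMETERS : @hdrPlusRequest: pending HDR+ request to fill out on success
 *              @request: framework capture request
 *              @metadata: request settings
 *
 * RETURN     : true if the request was submitted to the HDR+ service,
 *              false otherwise.
 *==========================================================================*/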
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014471bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14472 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14473 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014474{
14475 if (hdrPlusRequest == nullptr) return false;
14476
14477 // Check noise reduction mode is high quality.
14478 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14479 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14480 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014481 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is missing or not HQ.",
14482 __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014483 return false;
14484 }
14485
14486 // Check edge mode is high quality.
14487 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14488 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14489 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14490 return false;
14491 }
14492
14493 if (request.num_output_buffers != 1 ||
14494 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14495 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014496 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14497 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14498 request.output_buffers[i].stream->width,
14499 request.output_buffers[i].stream->height,
14500 request.output_buffers[i].stream->format);
14501 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014502 return false;
14503 }
14504
14505 // Get a YUV buffer from pic channel.
14506 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14507 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14508 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14509 if (res != OK) {
14510 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14511 __FUNCTION__, strerror(-res), res);
14512 return false;
14513 }
14514
14515 pbcamera::StreamBuffer buffer;
14516 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014517 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014518 buffer.data = yuvBuffer->buffer;
14519 buffer.dataSize = yuvBuffer->frame_len;
14520
14521 pbcamera::CaptureRequest pbRequest;
14522 pbRequest.id = request.frame_number;
14523 pbRequest.outputBuffers.push_back(buffer);
14524
14525 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014526 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014527 if (res != OK) {
14528 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14529 strerror(-res), res);
14530 return false;
14531 }
14532
14533 hdrPlusRequest->yuvBuffer = yuvBuffer;
14534 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14535
14536 return true;
14537}
14538
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014539status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14540{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014541 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14542 return OK;
14543 }
14544
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014545 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014546 if (res != OK) {
14547 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14548 strerror(-res), res);
14549 return res;
14550 }
14551 gHdrPlusClientOpening = true;
14552
14553 return OK;
14554}
14555
Chien-Yu Chenee335912017-02-09 17:53:20 -080014556status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14557{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014558 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014559
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014560 if (mHdrPlusModeEnabled) {
14561 return OK;
14562 }
14563
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014564 // Check if gHdrPlusClient is opened or being opened.
14565 if (gHdrPlusClient == nullptr) {
14566 if (gHdrPlusClientOpening) {
14567 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14568 return OK;
14569 }
14570
14571 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014572 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014573 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14574 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014575 return res;
14576 }
14577
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014578 // When opening HDR+ client completes, HDR+ mode will be enabled.
14579 return OK;
14580
Chien-Yu Chenee335912017-02-09 17:53:20 -080014581 }
14582
14583 // Configure stream for HDR+.
14584 res = configureHdrPlusStreamsLocked();
14585 if (res != OK) {
14586 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014587 return res;
14588 }
14589
14590 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14591 res = gHdrPlusClient->setZslHdrPlusMode(true);
14592 if (res != OK) {
14593 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014594 return res;
14595 }
14596
14597 mHdrPlusModeEnabled = true;
14598 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14599
14600 return OK;
14601}
14602
14603void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14604{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014605 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014606 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014607 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14608 if (res != OK) {
14609 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14610 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014611
14612 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014613 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014614 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014615 }
14616
14617 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014618 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014619 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14620}
14621
14622status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014623{
14624 pbcamera::InputConfiguration inputConfig;
14625 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14626 status_t res = OK;
14627
14628 // Configure HDR+ client streams.
14629 // Get input config.
14630 if (mHdrPlusRawSrcChannel) {
14631 // HDR+ input buffers will be provided by HAL.
14632 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14633 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14634 if (res != OK) {
14635 LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14636 __FUNCTION__, strerror(-res), res);
14637 return res;
14638 }
14639
14640 inputConfig.isSensorInput = false;
14641 } else {
14642 // Sensor MIPI will send data to Easel.
14643 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014644 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014645 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14646 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14647 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14648 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14649 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014650 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014651 if (mSensorModeInfo.num_raw_bits != 10) {
14652 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14653 mSensorModeInfo.num_raw_bits);
14654 return BAD_VALUE;
14655 }
14656
14657 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014658 }
14659
14660 // Get output configurations.
14661 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014662 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014663
14664 // Easel may need to output YUV output buffers if mPictureChannel was created.
14665 pbcamera::StreamConfiguration yuvOutputConfig;
14666 if (mPictureChannel != nullptr) {
14667 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14668 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14669 if (res != OK) {
14670 LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14671 __FUNCTION__, strerror(-res), res);
14672
14673 return res;
14674 }
14675
14676 outputStreamConfigs.push_back(yuvOutputConfig);
14677 }
14678
14679 // TODO: consider other channels for YUV output buffers.
14680
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014681 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014682 if (res != OK) {
14683 LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14684 strerror(-res), res);
14685 return res;
14686 }
14687
14688 return OK;
14689}
14690
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014691void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14692{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014693 if (client == nullptr) {
14694 ALOGE("%s: Opened client is null.", __FUNCTION__);
14695 return;
14696 }
14697
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014698 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014699 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14700
14701 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014702 if (!gHdrPlusClientOpening) {
14703 ALOGW("%s: HDR+ mode was disabled while the HDR+ client was being opened.", __FUNCTION__);
14704 return;
14705 }
14706
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014707 gHdrPlusClient = std::move(client);
14708 gHdrPlusClientOpening = false;
14709
14710 // Set static metadata.
14711 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14712 if (res != OK) {
14713 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14714 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070014715 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014716 gHdrPlusClient = nullptr;
14717 return;
14718 }
14719
14720 // Enable HDR+ mode.
14721 res = enableHdrPlusModeLocked();
14722 if (res != OK) {
14723 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14724 }
14725}
14726
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014727void QCamera3HardwareInterface::onOpenFailed(status_t err)
14728{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014729 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14730 Mutex::Autolock l(gHdrPlusClientLock);
14731 gHdrPlusClientOpening = false;
14732}
14733
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014734void QCamera3HardwareInterface::onFatalError()
14735{
14736 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14737
14738 // Set HAL state to error.
14739 pthread_mutex_lock(&mMutex);
14740 mState = ERROR;
14741 pthread_mutex_unlock(&mMutex);
14742
14743 handleCameraDeviceError();
14744}
14745
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014746void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014747 const camera_metadata_t &resultMetadata)
14748{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014749 if (result != nullptr) {
14750 if (result->outputBuffers.size() != 1) {
14751 ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14752 result->outputBuffers.size());
14753 return;
14754 }
14755
14756 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14757 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14758 result->outputBuffers[0].streamId);
14759 return;
14760 }
14761
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014762 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014763 HdrPlusPendingRequest pendingRequest;
14764 {
14765 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14766 auto req = mHdrPlusPendingRequests.find(result->requestId);
if (req == mHdrPlusPendingRequests.end()) {
    // Drop the result if there is no matching pending HDR+ request.
    ALOGE("%s: Couldn't find a pending HDR+ request for request id %d.", __FUNCTION__,
            result->requestId);
    return;
}
14767 pendingRequest = req->second;
14768 }
14769
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014770 // Update the result metadata with the settings of the HDR+ still capture request because
14771 // the result metadata belongs to a ZSL buffer.
14772 CameraMetadata metadata;
14773 metadata = &resultMetadata;
14774 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14775 camera_metadata_t* updatedResultMetadata = metadata.release();
14776
14777 QCamera3PicChannel *picChannel =
14778 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14779
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014780 // Check if dumping HDR+ YUV output is enabled.
14781 char prop[PROPERTY_VALUE_MAX];
14782 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14783 bool dumpYuvOutput = atoi(prop);
14784
14785 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014786 // Dump yuv buffer to a ppm file.
14787 pbcamera::StreamConfiguration outputConfig;
14788 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14789 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14790 if (rc == OK) {
14791 char buf[FILENAME_MAX] = {};
14792 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14793 result->requestId, result->outputBuffers[0].streamId,
14794 outputConfig.image.width, outputConfig.image.height);
14795
14796 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14797 } else {
14798 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14799 __FUNCTION__, strerror(-rc), rc);
14800 }
14801 }
14802
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014803 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14804 auto halMetadata = std::make_shared<metadata_buffer_t>();
14805 clear_metadata_buffer(halMetadata.get());
14806
14807 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14808 // encoding.
14809 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14810 halStreamId, /*minFrameDuration*/0);
14811 if (res == OK) {
14812 // Return the buffer to pic channel for encoding.
14813 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14814 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14815 halMetadata);
14816 } else {
14817 // Return the buffer without encoding.
14818 // TODO: This should not happen but we may want to report an error buffer to camera
14819 // service.
14820 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14821 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14822 strerror(-res), res);
14823 }
14824
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014825 // Find the sensor timestamp in the result metadata.
14826 camera_metadata_ro_entry_t entry;
14827 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14828 ANDROID_SENSOR_TIMESTAMP, &entry);
14829 if (res != OK) {
14830 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14831 __FUNCTION__, result->requestId, strerror(-res), res);
14832 } else {
14833 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14834 }
14835
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014836 // Send HDR+ metadata to framework.
14837 {
14838 pthread_mutex_lock(&mMutex);
14839
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014840 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14841 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014842 pthread_mutex_unlock(&mMutex);
14843 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014844
14845 // Remove the HDR+ pending request.
14846 {
14847 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14848 auto req = mHdrPlusPendingRequests.find(result->requestId);
14849 mHdrPlusPendingRequests.erase(req);
14850 }
14851 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014852}
14853
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014854void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14855{
14856 if (failedResult == nullptr) {
14857 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14858 return;
14859 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014860
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014861 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014862
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014863 // Remove the pending HDR+ request.
14864 {
14865 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14866 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14867
14868 // Return the buffer to pic channel.
14869 QCamera3PicChannel *picChannel =
14870 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14871 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14872
14873 mHdrPlusPendingRequests.erase(pendingRequest);
14874 }
14875
14876 pthread_mutex_lock(&mMutex);
14877
14878 // Find the pending buffers.
14879 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14880 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14881 if (pendingBuffers->frame_number == failedResult->requestId) {
14882 break;
14883 }
14884 pendingBuffers++;
14885 }
14886
14887 // Send out buffer errors for the pending buffers.
14888 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14889 std::vector<camera3_stream_buffer_t> streamBuffers;
14890 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14891 // Prepare a stream buffer.
14892 camera3_stream_buffer_t streamBuffer = {};
14893 streamBuffer.stream = buffer.stream;
14894 streamBuffer.buffer = buffer.buffer;
14895 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14896 streamBuffer.acquire_fence = -1;
14897 streamBuffer.release_fence = -1;
14898
14899 streamBuffers.push_back(streamBuffer);
14900
14901 // Send out error buffer event.
14902 camera3_notify_msg_t notify_msg = {};
14903 notify_msg.type = CAMERA3_MSG_ERROR;
14904 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14905 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14906 notify_msg.message.error.error_stream = buffer.stream;
14907
14908 orchestrateNotify(&notify_msg);
14909 }
14910
14911 camera3_capture_result_t result = {};
14912 result.frame_number = pendingBuffers->frame_number;
14913 result.num_output_buffers = streamBuffers.size();
14914 result.output_buffers = &streamBuffers[0];
14915
14916 // Send out result with buffer errors.
14917 orchestrateResult(&result);
14918
14919 // Remove pending buffers.
14920 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14921 }
14922
14923 // Remove pending request.
14924 auto halRequest = mPendingRequestsList.begin();
14925 while (halRequest != mPendingRequestsList.end()) {
14926 if (halRequest->frame_number == failedResult->requestId) {
14927 mPendingRequestsList.erase(halRequest);
14928 break;
14929 }
14930 halRequest++;
14931 }
14932
14933 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014934}
14935
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014936
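// ShutterDispatcher keeps shutter notifications in frame number order. A frame is
// registered with expectShutter(); markShutterReady() records its sensor timestamp
// and sends out all consecutive ready shutters, holding back any shutter whose
// earlier frames are not ready yet. Regular and reprocess requests are tracked in
// separate maps.
//
// Illustrative sequence (hypothetical frame numbers): if frames 10 and 11 are
// expected and frame 11 becomes ready first, it is held; once frame 10 is marked
// ready, shutters for 10 and then 11 are delivered.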
14937ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14938 mParent(parent) {}
14939
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014940void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014941{
14942 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014943
14944 if (isReprocess) {
14945 mReprocessShutters.emplace(frameNumber, Shutter());
14946 } else {
14947 mShutters.emplace(frameNumber, Shutter());
14948 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014949}
14950
14951void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14952{
14953 std::lock_guard<std::mutex> lock(mLock);
14954
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014955 std::map<uint32_t, Shutter> *shutters = nullptr;
14956
14957 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014958 auto shutter = mShutters.find(frameNumber);
14959 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014960 shutter = mReprocessShutters.find(frameNumber);
14961 if (shutter == mReprocessShutters.end()) {
14962 // Shutter was already sent.
14963 return;
14964 }
14965 shutters = &mReprocessShutters;
14966 } else {
14967 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014968 }
14969
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014970 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014971 shutter->second.ready = true;
14972 shutter->second.timestamp = timestamp;
14973
14974 // Iterate through the shutters and send them out until we reach one that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014975 shutter = shutters->begin();
14976 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014977 if (!shutter->second.ready) {
14978 // If this shutter is not ready, the following shutters can't be sent.
14979 break;
14980 }
14981
14982 camera3_notify_msg_t msg = {};
14983 msg.type = CAMERA3_MSG_SHUTTER;
14984 msg.message.shutter.frame_number = shutter->first;
14985 msg.message.shutter.timestamp = shutter->second.timestamp;
14986 mParent->orchestrateNotify(&msg);
14987
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014988 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014989 }
14990}
14991
14992void ShutterDispatcher::clear(uint32_t frameNumber)
14993{
14994 std::lock_guard<std::mutex> lock(mLock);
14995 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070014996 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014997}
14998
14999void ShutterDispatcher::clear()
15000{
15001 std::lock_guard<std::mutex> lock(mLock);
15002
15003 // Log errors for stale shutters.
15004 for (auto &shutter : mShutters) {
15005 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15006 __FUNCTION__, shutter.first, shutter.second.ready,
15007 shutter.second.timestamp);
15008 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015009
15010 // Log errors for stale reprocess shutters.
15011 for (auto &shutter : mReprocessShutters) {
15012 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15013 __FUNCTION__, shutter.first, shutter.second.ready,
15014 shutter.second.timestamp);
15015 }
15016
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015017 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015018 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015019}
15020
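// OutputBufferDispatcher returns output buffers to the framework in frame number
// order, independently for each configured stream. configureStreams() resets the
// per-stream maps, expectBuffer() registers a pending buffer for a frame number,
// and markBufferReady() sends out all consecutive ready buffers of that stream,
// holding back a buffer until the buffers of earlier frames are ready.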
15021OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15022 mParent(parent) {}
15023
15024status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15025{
15026 std::lock_guard<std::mutex> lock(mLock);
15027 mStreamBuffers.clear();
15028 if (!streamList) {
15029 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15030 return -EINVAL;
15031 }
15032
15033 // Create a "frame-number -> buffer" map for each stream.
15034 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15035 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15036 }
15037
15038 return OK;
15039}
15040
15041status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15042{
15043 std::lock_guard<std::mutex> lock(mLock);
15044
15045 // Find the "frame-number -> buffer" map for the stream.
15046 auto buffers = mStreamBuffers.find(stream);
15047 if (buffers == mStreamBuffers.end()) {
15048 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15049 return -EINVAL;
15050 }
15051
15052 // Create an unready buffer for this frame number.
15053 buffers->second.emplace(frameNumber, Buffer());
15054 return OK;
15055}
15056
15057void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15058 const camera3_stream_buffer_t &buffer)
15059{
15060 std::lock_guard<std::mutex> lock(mLock);
15061
15062 // Find the frame number -> buffer map for the stream.
15063 auto buffers = mStreamBuffers.find(buffer.stream);
15064 if (buffers == mStreamBuffers.end()) {
15065 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15066 return;
15067 }
15068
15069 // Find the unready buffer for this frame number and mark it ready.
15070 auto pendingBuffer = buffers->second.find(frameNumber);
15071 if (pendingBuffer == buffers->second.end()) {
15072 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15073 return;
15074 }
15075
15076 pendingBuffer->second.ready = true;
15077 pendingBuffer->second.buffer = buffer;
15078
15079 // Iterate through the buffers and send them out until we reach one that is not ready yet.
15080 pendingBuffer = buffers->second.begin();
15081 while (pendingBuffer != buffers->second.end()) {
15082 if (!pendingBuffer->second.ready) {
15083 // If this buffer is not ready, the following buffers can't be sent.
15084 break;
15085 }
15086
15087 camera3_capture_result_t result = {};
15088 result.frame_number = pendingBuffer->first;
15089 result.num_output_buffers = 1;
15090 result.output_buffers = &pendingBuffer->second.buffer;
15091
15092 // Send out the result with the ready buffer.
15093 mParent->orchestrateResult(&result);
15094
15095 pendingBuffer = buffers->second.erase(pendingBuffer);
15096 }
15097}
15098
15099void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15100{
15101 std::lock_guard<std::mutex> lock(mLock);
15102
15103 // Log errors for stale buffers.
15104 for (auto &buffers : mStreamBuffers) {
15105 for (auto &buffer : buffers.second) {
15106 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15107 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15108 }
15109 buffers.second.clear();
15110 }
15111
15112 if (clearConfiguredStreams) {
15113 mStreamBuffers.clear();
15114 }
15115}
15116
Thierry Strudel3d639192016-09-09 11:52:26 -070015117}; //end namespace qcamera