/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

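// Request pipeline constants: the latency of an empty pipeline (in frames),
// the number of partial metadata results delivered per capture, and the
// frame-skip delay.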
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if the requested FPS is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Threshold (in seconds) for detecting missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

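// Superset of post-processing features that may be applied to HAL3 processed streams.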
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

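// Per-sensor capability and static metadata tables, plus process-wide state
// shared by all camera sessions in this HAL.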
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.

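// Static lookup tables that translate Android and vendor metadata enum values
// to the corresponding mm-camera (cam_*) values; these tables are searched
// linearly when converting settings and results.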
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

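// Supported JPEG thumbnail dimensions, flattened as (width, height) pairs;
// the leading (0, 0) entry allows JPEGs to be captured without a thumbnail.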
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

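// camera3_device_ops vtable handed to the camera framework; legacy entry points
// not implemented by this HAL (register_stream_buffers, get_metadata_vendor_tag_ops)
// are left NULL.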
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// Initialize session IDs to an invalid sentinel value.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

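// Logs a named Easel profiling event with a CLOCK_BOOTTIME timestamp in
// milliseconds; this is a no-op unless gEaselProfilingEnabled is set.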
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

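    // Cache whether the device is currently in a 60Hz power-line-frequency zone
    // (see is60HzZone()); used when resolving the AUTO antibanding mode.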
    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

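    // Resume Easel (when present) before opening the camera so that HDR+
    // processing is available for this session.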
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

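    // Tear down HDR+ / Easel state now that the camera session is closed:
    // close the HDR+ client if one is open, then stop MIPI and suspend Easel.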
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001104 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07001105 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1106 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001107 if (gHdrPlusClient != nullptr) {
1108 // Disable HDR+ mode.
1109 disableHdrPlusModeLocked();
1110 // Disconnect Easel if it's connected.
Chien-Yu Chend77a5462017-06-02 18:00:38 -07001111 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001112 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001113 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001114
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001115 if (EaselManagerClientOpened) {
Chien-Yu Chend77a5462017-06-02 18:00:38 -07001116 rc = gEaselManagerClient->stopMipi(mCameraId);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001117 if (rc != 0) {
1118 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1119 }
1120
Chien-Yu Chend77a5462017-06-02 18:00:38 -07001121 rc = gEaselManagerClient->suspend();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001122 if (rc != 0) {
1123 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1124 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001125 }
1126 }
1127
Thierry Strudel3d639192016-09-09 11:52:26 -07001128 return rc;
1129}
1130
1131/*===========================================================================
1132 * FUNCTION : initialize
1133 *
1134 * DESCRIPTION: Initialize frameworks callback functions
1135 *
1136 * PARAMETERS :
1137 * @callback_ops : callback function to frameworks
1138 *
1139 * RETURN :
1140 *
1141 *==========================================================================*/
1142int QCamera3HardwareInterface::initialize(
1143 const struct camera3_callback_ops *callback_ops)
1144{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001145 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001146 int rc;
1147
1148 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1149 pthread_mutex_lock(&mMutex);
1150
1151 // Validate current state
1152 switch (mState) {
1153 case OPENED:
1154 /* valid state */
1155 break;
1156 default:
1157 LOGE("Invalid state %d", mState);
1158 rc = -ENODEV;
1159 goto err1;
1160 }
1161
1162 rc = initParameters();
1163 if (rc < 0) {
1164 LOGE("initParamters failed %d", rc);
1165 goto err1;
1166 }
1167 mCallbackOps = callback_ops;
1168
1169 mChannelHandle = mCameraHandle->ops->add_channel(
1170 mCameraHandle->camera_handle, NULL, NULL, this);
1171 if (mChannelHandle == 0) {
1172 LOGE("add_channel failed");
1173 rc = -ENOMEM;
1174 pthread_mutex_unlock(&mMutex);
1175 return rc;
1176 }
1177
1178 pthread_mutex_unlock(&mMutex);
1179 mCameraInitialized = true;
1180 mState = INITIALIZED;
1181 LOGI("X");
1182 return 0;
1183
1184err1:
1185 pthread_mutex_unlock(&mMutex);
1186 return rc;
1187}
1188
1189/*===========================================================================
1190 * FUNCTION : validateStreamDimensions
1191 *
1192 * DESCRIPTION: Check if the configuration requested are those advertised
1193 *
1194 * PARAMETERS :
1195 * @stream_list : streams to be configured
1196 *
1197 * RETURN :
1198 *
1199 *==========================================================================*/
1200int QCamera3HardwareInterface::validateStreamDimensions(
1201 camera3_stream_configuration_t *streamList)
1202{
1203 int rc = NO_ERROR;
1204 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001205 uint32_t depthWidth = 0;
1206 uint32_t depthHeight = 0;
1207 if (mPDSupported) {
1208 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1209 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1210 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001211
1212 camera3_stream_t *inputStream = NULL;
1213 /*
1214 * Loop through all streams to find input stream if it exists*
1215 */
1216 for (size_t i = 0; i< streamList->num_streams; i++) {
1217 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1218 if (inputStream != NULL) {
1219 LOGE("Error, Multiple input streams requested");
1220 return -EINVAL;
1221 }
1222 inputStream = streamList->streams[i];
1223 }
1224 }
1225 /*
1226 * Loop through all streams requested in configuration
1227 * Check if unsupported sizes have been requested on any of them
1228 */
1229 for (size_t j = 0; j < streamList->num_streams; j++) {
1230 bool sizeFound = false;
1231 camera3_stream_t *newStream = streamList->streams[j];
1232
1233 uint32_t rotatedHeight = newStream->height;
1234 uint32_t rotatedWidth = newStream->width;
1235 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1236 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1237 rotatedHeight = newStream->width;
1238 rotatedWidth = newStream->height;
1239 }
1240
1241 /*
1242 * Sizes are different for each type of stream format check against
1243 * appropriate table.
1244 */
1245 switch (newStream->format) {
1246 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1247 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1248 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001249 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1250 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1251 mPDSupported) {
1252 if ((depthWidth == newStream->width) &&
1253 (depthHeight == newStream->height)) {
1254 sizeFound = true;
1255 }
1256 break;
1257 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001258 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1259 for (size_t i = 0; i < count; i++) {
1260 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1261 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1262 sizeFound = true;
1263 break;
1264 }
1265 }
1266 break;
1267 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001268 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1269 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001270 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001271 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001272 if ((depthSamplesCount == newStream->width) &&
1273 (1 == newStream->height)) {
1274 sizeFound = true;
1275 }
1276 break;
1277 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001278 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1279 /* Verify set size against generated sizes table */
1280 for (size_t i = 0; i < count; i++) {
1281 if (((int32_t)rotatedWidth ==
1282 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1283 ((int32_t)rotatedHeight ==
1284 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1285 sizeFound = true;
1286 break;
1287 }
1288 }
1289 break;
1290 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1291 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1292 default:
1293 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1294 || newStream->stream_type == CAMERA3_STREAM_INPUT
1295 || IS_USAGE_ZSL(newStream->usage)) {
1296 if (((int32_t)rotatedWidth ==
1297 gCamCapability[mCameraId]->active_array_size.width) &&
1298 ((int32_t)rotatedHeight ==
1299 gCamCapability[mCameraId]->active_array_size.height)) {
1300 sizeFound = true;
1301 break;
1302 }
 1303 /* We could potentially break here to enforce that the ZSL stream
 1304 * set from the framework is always the full active array size,
 1305 * but it is not clear from the spec whether the framework will always
 1306 * follow that. We also have logic to override to the full array
 1307 * size, so keep the logic lenient for the moment.
1308 */
1309 }
1310 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1311 MAX_SIZES_CNT);
1312 for (size_t i = 0; i < count; i++) {
1313 if (((int32_t)rotatedWidth ==
1314 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1315 ((int32_t)rotatedHeight ==
1316 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1317 sizeFound = true;
1318 break;
1319 }
1320 }
1321 break;
1322 } /* End of switch(newStream->format) */
1323
1324 /* We error out even if a single stream has unsupported size set */
1325 if (!sizeFound) {
1326 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1327 rotatedWidth, rotatedHeight, newStream->format,
1328 gCamCapability[mCameraId]->active_array_size.width,
1329 gCamCapability[mCameraId]->active_array_size.height);
1330 rc = -EINVAL;
1331 break;
1332 }
1333 } /* End of for each stream */
1334 return rc;
1335}
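
// Illustrative sketch, not part of the original HAL sources: the depth
// point-cloud check above expects the depth BLOB stream to be laid out as
// <sampleCount> x 1, where sampleCount = (PD width * PD height * 2) / 16 as
// per the spec. The helper name below is hypothetical, added only for clarity.
static inline bool isExpectedDepthCloudSize(uint32_t pdWidth, uint32_t pdHeight,
        const camera3_stream_t *stream)
{
    uint32_t depthSamplesCount = (pdWidth * pdHeight * 2) / 16;
    return (stream->width == depthSamplesCount) && (stream->height == 1);
}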
1336
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001337/*===========================================================================
1338 * FUNCTION : validateUsageFlags
1339 *
 1340 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1341 *
1342 * PARAMETERS :
1343 * @stream_list : streams to be configured
1344 *
1345 * RETURN :
1346 * NO_ERROR if the usage flags are supported
1347 * error code if usage flags are not supported
1348 *
1349 *==========================================================================*/
1350int QCamera3HardwareInterface::validateUsageFlags(
1351 const camera3_stream_configuration_t* streamList)
1352{
1353 for (size_t j = 0; j < streamList->num_streams; j++) {
1354 const camera3_stream_t *newStream = streamList->streams[j];
1355
1356 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1357 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1358 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1359 continue;
1360 }
1361
Jason Leec4cf5032017-05-24 18:31:41 -07001362 // Here we only care whether it's EIS3 or not
1363 char is_type_value[PROPERTY_VALUE_MAX];
1364 property_get("persist.camera.is_type", is_type_value, "4");
1365 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1366 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1367 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1368 isType = IS_TYPE_NONE;
1369
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1371 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1372 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1373 bool forcePreviewUBWC = true;
1374 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1375 forcePreviewUBWC = false;
1376 }
1377 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001378 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001379 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001380 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001381 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001382 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001383
1384 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1385 // So color spaces will always match.
1386
1387 // Check whether underlying formats of shared streams match.
1388 if (isVideo && isPreview && videoFormat != previewFormat) {
1389 LOGE("Combined video and preview usage flag is not supported");
1390 return -EINVAL;
1391 }
1392 if (isPreview && isZSL && previewFormat != zslFormat) {
1393 LOGE("Combined preview and zsl usage flag is not supported");
1394 return -EINVAL;
1395 }
1396 if (isVideo && isZSL && videoFormat != zslFormat) {
1397 LOGE("Combined video and zsl usage flag is not supported");
1398 return -EINVAL;
1399 }
1400 }
1401 return NO_ERROR;
1402}
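
// Minimal sketch (assumption, not an existing helper in this HAL): the
// "persist.camera.is_type" property read above defaults to "4", and only
// EIS 3.0 is distinguished -- any other value is treated as IS_TYPE_NONE
// when selecting the stream default formats compared in validateUsageFlags().
static inline cam_is_type_t readConfiguredIsType()
{
    char is_type_value[PROPERTY_VALUE_MAX];
    property_get("persist.camera.is_type", is_type_value, "4");
    return (atoi(is_type_value) == IS_TYPE_EIS_3_0) ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
}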
1403
1404/*===========================================================================
1405 * FUNCTION : validateUsageFlagsForEis
1406 *
1407 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1408 *
1409 * PARAMETERS :
1410 * @stream_list : streams to be configured
1411 *
1412 * RETURN :
1413 * NO_ERROR if the usage flags are supported
1414 * error code if usage flags are not supported
1415 *
1416 *==========================================================================*/
1417int QCamera3HardwareInterface::validateUsageFlagsForEis(
1418 const camera3_stream_configuration_t* streamList)
1419{
1420 for (size_t j = 0; j < streamList->num_streams; j++) {
1421 const camera3_stream_t *newStream = streamList->streams[j];
1422
1423 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1424 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1425
 1426 // Because EIS is "hard-coded" for certain use cases, and the current
1427 // implementation doesn't support shared preview and video on the same
1428 // stream, return failure if EIS is forced on.
1429 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1430 LOGE("Combined video and preview usage flag is not supported due to EIS");
1431 return -EINVAL;
1432 }
1433 }
1434 return NO_ERROR;
1435}
1436
Thierry Strudel3d639192016-09-09 11:52:26 -07001437/*==============================================================================
1438 * FUNCTION : isSupportChannelNeeded
1439 *
 1440 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1441 *
1442 * PARAMETERS :
1443 * @stream_list : streams to be configured
1444 * @stream_config_info : the config info for streams to be configured
1445 *
 1446 * RETURN : Boolean true/false decision
1447 *
1448 *==========================================================================*/
1449bool QCamera3HardwareInterface::isSupportChannelNeeded(
1450 camera3_stream_configuration_t *streamList,
1451 cam_stream_size_info_t stream_config_info)
1452{
1453 uint32_t i;
1454 bool pprocRequested = false;
 1455 /* Check for conditions where the PProc pipeline does not have any streams */
1456 for (i = 0; i < stream_config_info.num_streams; i++) {
1457 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1458 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1459 pprocRequested = true;
1460 break;
1461 }
1462 }
1463
1464 if (pprocRequested == false )
1465 return true;
1466
1467 /* Dummy stream needed if only raw or jpeg streams present */
1468 for (i = 0; i < streamList->num_streams; i++) {
1469 switch(streamList->streams[i]->format) {
1470 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1471 case HAL_PIXEL_FORMAT_RAW10:
1472 case HAL_PIXEL_FORMAT_RAW16:
1473 case HAL_PIXEL_FORMAT_BLOB:
1474 break;
1475 default:
1476 return false;
1477 }
1478 }
1479 return true;
1480}
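
// Example (illustrative): a configuration containing only RAW and/or BLOB
// (JPEG) streams never hits the default case in the second loop above, so
// this function returns true and a dummy support stream is added to keep the
// processing pipeline populated.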
1481
1482/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001483 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001484 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001486 *
1487 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001488 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001489 *
1490 * RETURN : int32_t type of status
1491 * NO_ERROR -- success
 1492 * non-zero failure code
1493 *
1494 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001495int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001496{
1497 int32_t rc = NO_ERROR;
1498
1499 cam_dimension_t max_dim = {0, 0};
1500 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1501 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1502 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1503 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1504 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1505 }
1506
1507 clear_metadata_buffer(mParameters);
1508
1509 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1510 max_dim);
1511 if (rc != NO_ERROR) {
1512 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1513 return rc;
1514 }
1515
1516 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001523 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001524
1525 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1526 mParameters);
1527 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001528 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001529 return rc;
1530 }
1531
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001532 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001533 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1534 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1535 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1536 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1537 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001538
1539 return rc;
1540}
1541
1542/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001543 * FUNCTION : getCurrentSensorModeInfo
1544 *
1545 * DESCRIPTION: Get sensor mode information that is currently selected.
1546 *
1547 * PARAMETERS :
1548 * @sensorModeInfo : sensor mode information (output)
1549 *
1550 * RETURN : int32_t type of status
1551 * NO_ERROR -- success
 1552 * non-zero failure code
1553 *
1554 *==========================================================================*/
1555int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1556{
1557 int32_t rc = NO_ERROR;
1558
1559 clear_metadata_buffer(mParameters);
1560 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1561
1562 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1563 mParameters);
1564 if (rc != NO_ERROR) {
 1565 LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1566 return rc;
1567 }
1568
1569 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1570 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1571 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1572 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1573 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1574 sensorModeInfo.num_raw_bits);
1575
1576 return rc;
1577}
1578
1579/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001580 * FUNCTION : addToPPFeatureMask
1581 *
1582 * DESCRIPTION: add additional features to pp feature mask based on
1583 * stream type and usecase
1584 *
1585 * PARAMETERS :
1586 * @stream_format : stream type for feature mask
1587 * @stream_idx : stream idx within postprocess_mask list to change
1588 *
 1589 * RETURN : None
1590 *
1591 *==========================================================================*/
1592void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1593 uint32_t stream_idx)
1594{
1595 char feature_mask_value[PROPERTY_VALUE_MAX];
1596 cam_feature_mask_t feature_mask;
1597 int args_converted;
1598 int property_len;
1599
1600 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001601#ifdef _LE_CAMERA_
1602 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1603 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1604 property_len = property_get("persist.camera.hal3.feature",
1605 feature_mask_value, swtnr_feature_mask_value);
1606#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001607 property_len = property_get("persist.camera.hal3.feature",
1608 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001609#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001610 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1611 (feature_mask_value[1] == 'x')) {
1612 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1613 } else {
1614 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1615 }
1616 if (1 != args_converted) {
1617 feature_mask = 0;
1618 LOGE("Wrong feature mask %s", feature_mask_value);
1619 return;
1620 }
1621
1622 switch (stream_format) {
1623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1624 /* Add LLVD to pp feature mask only if video hint is enabled */
1625 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1626 mStreamConfigInfo.postprocess_mask[stream_idx]
1627 |= CAM_QTI_FEATURE_SW_TNR;
1628 LOGH("Added SW TNR to pp feature mask");
1629 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1630 mStreamConfigInfo.postprocess_mask[stream_idx]
1631 |= CAM_QCOM_FEATURE_LLVD;
1632 LOGH("Added LLVD SeeMore to pp feature mask");
1633 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001634 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1635 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1637 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001638 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1639 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1640 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1641 CAM_QTI_FEATURE_BINNING_CORRECTION;
1642 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001643 break;
1644 }
1645 default:
1646 break;
1647 }
1648 LOGD("PP feature mask %llx",
1649 mStreamConfigInfo.postprocess_mask[stream_idx]);
1650}
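
// Minimal sketch (assumption) mirroring the property parsing above:
// "persist.camera.hal3.feature" accepts either a "0x"-prefixed hex mask or a
// plain decimal mask; a value that fails to parse is treated as 0.
static inline cam_feature_mask_t parseFeatureMaskProperty(const char *value)
{
    cam_feature_mask_t mask = 0;
    if ((value[0] == '0') && (value[1] == 'x')) {
        if (sscanf(value, "0x%llx", &mask) != 1) mask = 0;
    } else {
        if (sscanf(value, "%lld", &mask) != 1) mask = 0;
    }
    return mask;
}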
1651
1652/*==============================================================================
1653 * FUNCTION : updateFpsInPreviewBuffer
1654 *
1655 * DESCRIPTION: update FPS information in preview buffer.
1656 *
1657 * PARAMETERS :
1658 * @metadata : pointer to metadata buffer
1659 * @frame_number: frame_number to look for in pending buffer list
1660 *
1661 * RETURN : None
1662 *
1663 *==========================================================================*/
1664void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1665 uint32_t frame_number)
1666{
1667 // Mark all pending buffers for this particular request
1668 // with corresponding framerate information
1669 for (List<PendingBuffersInRequest>::iterator req =
1670 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1671 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1672 for(List<PendingBufferInfo>::iterator j =
1673 req->mPendingBufferList.begin();
1674 j != req->mPendingBufferList.end(); j++) {
1675 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1676 if ((req->frame_number == frame_number) &&
1677 (channel->getStreamTypeMask() &
1678 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1679 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1680 CAM_INTF_PARM_FPS_RANGE, metadata) {
1681 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1682 struct private_handle_t *priv_handle =
1683 (struct private_handle_t *)(*(j->buffer));
1684 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1685 }
1686 }
1687 }
1688 }
1689}
1690
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001691/*==============================================================================
1692 * FUNCTION : updateTimeStampInPendingBuffers
1693 *
1694 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1695 * of a frame number
1696 *
1697 * PARAMETERS :
1698 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1699 * @timestamp : timestamp to be set
1700 *
1701 * RETURN : None
1702 *
1703 *==========================================================================*/
1704void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1705 uint32_t frameNumber, nsecs_t timestamp)
1706{
1707 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1708 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1709 if (req->frame_number != frameNumber)
1710 continue;
1711
1712 for (auto k = req->mPendingBufferList.begin();
1713 k != req->mPendingBufferList.end(); k++ ) {
1714 struct private_handle_t *priv_handle =
1715 (struct private_handle_t *) (*(k->buffer));
1716 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1717 }
1718 }
1719 return;
1720}
1721
Thierry Strudel3d639192016-09-09 11:52:26 -07001722/*===========================================================================
1723 * FUNCTION : configureStreams
1724 *
1725 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1726 * and output streams.
1727 *
1728 * PARAMETERS :
1729 * @stream_list : streams to be configured
1730 *
1731 * RETURN :
1732 *
1733 *==========================================================================*/
1734int QCamera3HardwareInterface::configureStreams(
1735 camera3_stream_configuration_t *streamList)
1736{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001737 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001738 int rc = 0;
1739
1740 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001741 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001742 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001743 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001744
1745 return rc;
1746}
1747
1748/*===========================================================================
1749 * FUNCTION : configureStreamsPerfLocked
1750 *
1751 * DESCRIPTION: configureStreams while perfLock is held.
1752 *
1753 * PARAMETERS :
1754 * @stream_list : streams to be configured
1755 *
1756 * RETURN : int32_t type of status
1757 * NO_ERROR -- success
 1758 * non-zero failure code
1759 *==========================================================================*/
1760int QCamera3HardwareInterface::configureStreamsPerfLocked(
1761 camera3_stream_configuration_t *streamList)
1762{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001763 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001764 int rc = 0;
1765
1766 // Sanity check stream_list
1767 if (streamList == NULL) {
1768 LOGE("NULL stream configuration");
1769 return BAD_VALUE;
1770 }
1771 if (streamList->streams == NULL) {
1772 LOGE("NULL stream list");
1773 return BAD_VALUE;
1774 }
1775
1776 if (streamList->num_streams < 1) {
1777 LOGE("Bad number of streams requested: %d",
1778 streamList->num_streams);
1779 return BAD_VALUE;
1780 }
1781
1782 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1783 LOGE("Maximum number of streams %d exceeded: %d",
1784 MAX_NUM_STREAMS, streamList->num_streams);
1785 return BAD_VALUE;
1786 }
1787
Jason Leec4cf5032017-05-24 18:31:41 -07001788 mOpMode = streamList->operation_mode;
1789 LOGD("mOpMode: %d", mOpMode);
1790
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001791 rc = validateUsageFlags(streamList);
1792 if (rc != NO_ERROR) {
1793 return rc;
1794 }
1795
Thierry Strudel3d639192016-09-09 11:52:26 -07001796 /* first invalidate all the streams in the mStreamInfo list;
1797 * if they appear again, they will be validated */
1798 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1799 it != mStreamInfo.end(); it++) {
1800 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1801 if (channel) {
1802 channel->stop();
1803 }
1804 (*it)->status = INVALID;
1805 }
1806
1807 if (mRawDumpChannel) {
1808 mRawDumpChannel->stop();
1809 delete mRawDumpChannel;
1810 mRawDumpChannel = NULL;
1811 }
1812
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001813 if (mHdrPlusRawSrcChannel) {
1814 mHdrPlusRawSrcChannel->stop();
1815 delete mHdrPlusRawSrcChannel;
1816 mHdrPlusRawSrcChannel = NULL;
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819 if (mSupportChannel)
1820 mSupportChannel->stop();
1821
1822 if (mAnalysisChannel) {
1823 mAnalysisChannel->stop();
1824 }
1825 if (mMetadataChannel) {
 1826 /* If mStreamInfo is not empty, there is a metadata stream */
1827 mMetadataChannel->stop();
1828 }
1829 if (mChannelHandle) {
1830 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001831 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001832 LOGD("stopping channel %d", mChannelHandle);
1833 }
1834
1835 pthread_mutex_lock(&mMutex);
1836
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001837 mPictureChannel = NULL;
1838
Thierry Strudel3d639192016-09-09 11:52:26 -07001839 // Check state
1840 switch (mState) {
1841 case INITIALIZED:
1842 case CONFIGURED:
1843 case STARTED:
1844 /* valid state */
1845 break;
1846 default:
1847 LOGE("Invalid state %d", mState);
1848 pthread_mutex_unlock(&mMutex);
1849 return -ENODEV;
1850 }
1851
1852 /* Check whether we have video stream */
1853 m_bIs4KVideo = false;
1854 m_bIsVideo = false;
1855 m_bEisSupportedSize = false;
1856 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001857 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001858 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001859 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001860 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 uint32_t videoWidth = 0U;
1862 uint32_t videoHeight = 0U;
1863 size_t rawStreamCnt = 0;
1864 size_t stallStreamCnt = 0;
1865 size_t processedStreamCnt = 0;
1866 // Number of streams on ISP encoder path
1867 size_t numStreamsOnEncoder = 0;
1868 size_t numYuv888OnEncoder = 0;
1869 bool bYuv888OverrideJpeg = false;
1870 cam_dimension_t largeYuv888Size = {0, 0};
1871 cam_dimension_t maxViewfinderSize = {0, 0};
1872 bool bJpegExceeds4K = false;
1873 bool bJpegOnEncoder = false;
1874 bool bUseCommonFeatureMask = false;
1875 cam_feature_mask_t commonFeatureMask = 0;
1876 bool bSmallJpegSize = false;
1877 uint32_t width_ratio;
1878 uint32_t height_ratio;
1879 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1880 camera3_stream_t *inputStream = NULL;
1881 bool isJpeg = false;
1882 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001883 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001884 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001885
1886 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1887
1888 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 uint8_t eis_prop_set;
1890 uint32_t maxEisWidth = 0;
1891 uint32_t maxEisHeight = 0;
1892
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001893 // Initialize all instant AEC related variables
1894 mInstantAEC = false;
1895 mResetInstantAEC = false;
1896 mInstantAECSettledFrameNumber = 0;
1897 mAecSkipDisplayFrameBound = 0;
1898 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001899 mCurrFeatureState = 0;
1900 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001901
Thierry Strudel3d639192016-09-09 11:52:26 -07001902 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1903
1904 size_t count = IS_TYPE_MAX;
1905 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1906 for (size_t i = 0; i < count; i++) {
1907 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001908 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1909 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 break;
1911 }
1912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001913
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001914 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001915 maxEisWidth = MAX_EIS_WIDTH;
1916 maxEisHeight = MAX_EIS_HEIGHT;
1917 }
1918
1919 /* EIS setprop control */
1920 char eis_prop[PROPERTY_VALUE_MAX];
1921 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001922 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001923 eis_prop_set = (uint8_t)atoi(eis_prop);
1924
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001925 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001926 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1927
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001928 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1929 m_bEisEnable, eis_prop_set, m_bEisSupported);
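    // Worked example (illustrative): with persist.camera.eis.enable left at its
    // default of "1" on a device advertising EIS 2.0/3.0, m_bEisEnable is true
    // unless the configuration is constrained high-speed; it is cleared again
    // further below for front/front-aux cameras or when no video stream exists.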
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001930
Thierry Strudel3d639192016-09-09 11:52:26 -07001931 /* stream configurations */
1932 for (size_t i = 0; i < streamList->num_streams; i++) {
1933 camera3_stream_t *newStream = streamList->streams[i];
1934 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1935 "height = %d, rotation = %d, usage = 0x%x",
1936 i, newStream->stream_type, newStream->format,
1937 newStream->width, newStream->height, newStream->rotation,
1938 newStream->usage);
1939 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1940 newStream->stream_type == CAMERA3_STREAM_INPUT){
1941 isZsl = true;
1942 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001943 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1944 IS_USAGE_PREVIEW(newStream->usage)) {
1945 isPreview = true;
1946 }
1947
Thierry Strudel3d639192016-09-09 11:52:26 -07001948 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1949 inputStream = newStream;
1950 }
1951
Emilian Peev7650c122017-01-19 08:24:33 -08001952 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1953 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 isJpeg = true;
1955 jpegSize.width = newStream->width;
1956 jpegSize.height = newStream->height;
1957 if (newStream->width > VIDEO_4K_WIDTH ||
1958 newStream->height > VIDEO_4K_HEIGHT)
1959 bJpegExceeds4K = true;
1960 }
1961
1962 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1963 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1964 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001965 // In HAL3 we can have multiple different video streams.
1966 // The variables video width and height are used below as
1967 // dimensions of the biggest of them
1968 if (videoWidth < newStream->width ||
1969 videoHeight < newStream->height) {
1970 videoWidth = newStream->width;
1971 videoHeight = newStream->height;
1972 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001973 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1974 (VIDEO_4K_HEIGHT <= newStream->height)) {
1975 m_bIs4KVideo = true;
1976 }
1977 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1978 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001979
Thierry Strudel3d639192016-09-09 11:52:26 -07001980 }
1981 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1982 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1983 switch (newStream->format) {
1984 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001985 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1986 depthPresent = true;
1987 break;
1988 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001989 stallStreamCnt++;
1990 if (isOnEncoder(maxViewfinderSize, newStream->width,
1991 newStream->height)) {
1992 numStreamsOnEncoder++;
1993 bJpegOnEncoder = true;
1994 }
1995 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1996 newStream->width);
1997 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 1998 newStream->height);
1999 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2000 "FATAL: max_downscale_factor cannot be zero and so assert");
2001 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2002 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2003 LOGH("Setting small jpeg size flag to true");
2004 bSmallJpegSize = true;
2005 }
2006 break;
2007 case HAL_PIXEL_FORMAT_RAW10:
2008 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2009 case HAL_PIXEL_FORMAT_RAW16:
2010 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002011 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2012 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2013 pdStatCount++;
2014 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002015 break;
2016 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2017 processedStreamCnt++;
2018 if (isOnEncoder(maxViewfinderSize, newStream->width,
2019 newStream->height)) {
2020 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2021 !IS_USAGE_ZSL(newStream->usage)) {
2022 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2023 }
2024 numStreamsOnEncoder++;
2025 }
2026 break;
2027 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2028 processedStreamCnt++;
2029 if (isOnEncoder(maxViewfinderSize, newStream->width,
2030 newStream->height)) {
2031 // If Yuv888 size is not greater than 4K, set feature mask
 2032 // to SUPERSET so that it supports concurrent requests on
2033 // YUV and JPEG.
2034 if (newStream->width <= VIDEO_4K_WIDTH &&
2035 newStream->height <= VIDEO_4K_HEIGHT) {
2036 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2037 }
2038 numStreamsOnEncoder++;
2039 numYuv888OnEncoder++;
2040 largeYuv888Size.width = newStream->width;
2041 largeYuv888Size.height = newStream->height;
2042 }
2043 break;
2044 default:
2045 processedStreamCnt++;
2046 if (isOnEncoder(maxViewfinderSize, newStream->width,
2047 newStream->height)) {
2048 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2049 numStreamsOnEncoder++;
2050 }
2051 break;
2052 }
2053
2054 }
2055 }
2056
2057 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2058 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2059 !m_bIsVideo) {
2060 m_bEisEnable = false;
2061 }
2062
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002063 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2064 pthread_mutex_unlock(&mMutex);
2065 return -EINVAL;
2066 }
2067
Thierry Strudel54dc9782017-02-15 12:12:10 -08002068 uint8_t forceEnableTnr = 0;
2069 char tnr_prop[PROPERTY_VALUE_MAX];
2070 memset(tnr_prop, 0, sizeof(tnr_prop));
2071 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2072 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2073
Thierry Strudel3d639192016-09-09 11:52:26 -07002074 /* Logic to enable/disable TNR based on specific config size/etc.*/
2075 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002076 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2077 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002078 else if (forceEnableTnr)
2079 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002080
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002081 char videoHdrProp[PROPERTY_VALUE_MAX];
2082 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2083 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2084 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2085
2086 if (hdr_mode_prop == 1 && m_bIsVideo &&
2087 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2088 m_bVideoHdrEnabled = true;
2089 else
2090 m_bVideoHdrEnabled = false;
2091
2092
Thierry Strudel3d639192016-09-09 11:52:26 -07002093 /* Check if num_streams is sane */
2094 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2095 rawStreamCnt > MAX_RAW_STREAMS ||
2096 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2097 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2098 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2099 pthread_mutex_unlock(&mMutex);
2100 return -EINVAL;
2101 }
2102 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002103 if (isZsl && m_bIs4KVideo) {
2104 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002105 pthread_mutex_unlock(&mMutex);
2106 return -EINVAL;
2107 }
2108 /* Check if stream sizes are sane */
2109 if (numStreamsOnEncoder > 2) {
2110 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2111 pthread_mutex_unlock(&mMutex);
2112 return -EINVAL;
2113 } else if (1 < numStreamsOnEncoder){
2114 bUseCommonFeatureMask = true;
2115 LOGH("Multiple streams above max viewfinder size, common mask needed");
2116 }
2117
2118 /* Check if BLOB size is greater than 4k in 4k recording case */
2119 if (m_bIs4KVideo && bJpegExceeds4K) {
2120 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2121 pthread_mutex_unlock(&mMutex);
2122 return -EINVAL;
2123 }
2124
Emilian Peev7650c122017-01-19 08:24:33 -08002125 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2126 depthPresent) {
2127 LOGE("HAL doesn't support depth streams in HFR mode!");
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131
Thierry Strudel3d639192016-09-09 11:52:26 -07002132 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2133 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2134 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2135 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2136 // configurations:
2137 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2138 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2139 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2140 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2141 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2142 __func__);
2143 pthread_mutex_unlock(&mMutex);
2144 return -EINVAL;
2145 }
2146
2147 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
 2148 // the YUV stream's size is greater than or equal to the JPEG size, set the common
2149 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2150 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2151 jpegSize.width, jpegSize.height) &&
2152 largeYuv888Size.width > jpegSize.width &&
2153 largeYuv888Size.height > jpegSize.height) {
2154 bYuv888OverrideJpeg = true;
2155 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2156 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2157 }
2158
2159 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2160 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2161 commonFeatureMask);
2162 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2163 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2164
2165 rc = validateStreamDimensions(streamList);
2166 if (rc == NO_ERROR) {
2167 rc = validateStreamRotations(streamList);
2168 }
2169 if (rc != NO_ERROR) {
2170 LOGE("Invalid stream configuration requested!");
2171 pthread_mutex_unlock(&mMutex);
2172 return rc;
2173 }
2174
Emilian Peev0f3c3162017-03-15 12:57:46 +00002175 if (1 < pdStatCount) {
2176 LOGE("HAL doesn't support multiple PD streams");
2177 pthread_mutex_unlock(&mMutex);
2178 return -EINVAL;
2179 }
2180
2181 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2182 (1 == pdStatCount)) {
2183 LOGE("HAL doesn't support PD streams in HFR mode!");
2184 pthread_mutex_unlock(&mMutex);
2185 return -EINVAL;
2186 }
2187
Thierry Strudel3d639192016-09-09 11:52:26 -07002188 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2189 for (size_t i = 0; i < streamList->num_streams; i++) {
2190 camera3_stream_t *newStream = streamList->streams[i];
2191 LOGH("newStream type = %d, stream format = %d "
2192 "stream size : %d x %d, stream rotation = %d",
2193 newStream->stream_type, newStream->format,
2194 newStream->width, newStream->height, newStream->rotation);
 2195 //if the stream is in the mStreamInfo list, validate it
2196 bool stream_exists = false;
2197 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2198 it != mStreamInfo.end(); it++) {
2199 if ((*it)->stream == newStream) {
2200 QCamera3ProcessingChannel *channel =
2201 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2202 stream_exists = true;
2203 if (channel)
2204 delete channel;
2205 (*it)->status = VALID;
2206 (*it)->stream->priv = NULL;
2207 (*it)->channel = NULL;
2208 }
2209 }
2210 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2211 //new stream
2212 stream_info_t* stream_info;
2213 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2214 if (!stream_info) {
2215 LOGE("Could not allocate stream info");
2216 rc = -ENOMEM;
2217 pthread_mutex_unlock(&mMutex);
2218 return rc;
2219 }
2220 stream_info->stream = newStream;
2221 stream_info->status = VALID;
2222 stream_info->channel = NULL;
2223 mStreamInfo.push_back(stream_info);
2224 }
2225 /* Covers Opaque ZSL and API1 F/W ZSL */
2226 if (IS_USAGE_ZSL(newStream->usage)
2227 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2228 if (zslStream != NULL) {
2229 LOGE("Multiple input/reprocess streams requested!");
2230 pthread_mutex_unlock(&mMutex);
2231 return BAD_VALUE;
2232 }
2233 zslStream = newStream;
2234 }
2235 /* Covers YUV reprocess */
2236 if (inputStream != NULL) {
2237 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2238 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2239 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2240 && inputStream->width == newStream->width
2241 && inputStream->height == newStream->height) {
2242 if (zslStream != NULL) {
 2243 /* This scenario indicates that multiple YUV streams with the same size
 2244 * as the input stream have been requested. Since the zsl stream handle
 2245 * is solely used for the purpose of overriding the size of streams
 2246 * which share h/w streams, we will just make a guess here as to
 2247 * which of the streams is a ZSL stream. This will be refactored
 2248 * once we have generic logic for streams sharing encoder output.
2249 */
2250 LOGH("Warning, Multiple ip/reprocess streams requested!");
2251 }
2252 zslStream = newStream;
2253 }
2254 }
2255 }
2256
2257 /* If a zsl stream is set, we know that we have configured at least one input or
2258 bidirectional stream */
2259 if (NULL != zslStream) {
2260 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2261 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2262 mInputStreamInfo.format = zslStream->format;
2263 mInputStreamInfo.usage = zslStream->usage;
2264 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2265 mInputStreamInfo.dim.width,
2266 mInputStreamInfo.dim.height,
2267 mInputStreamInfo.format, mInputStreamInfo.usage);
2268 }
2269
2270 cleanAndSortStreamInfo();
2271 if (mMetadataChannel) {
2272 delete mMetadataChannel;
2273 mMetadataChannel = NULL;
2274 }
2275 if (mSupportChannel) {
2276 delete mSupportChannel;
2277 mSupportChannel = NULL;
2278 }
2279
2280 if (mAnalysisChannel) {
2281 delete mAnalysisChannel;
2282 mAnalysisChannel = NULL;
2283 }
2284
2285 if (mDummyBatchChannel) {
2286 delete mDummyBatchChannel;
2287 mDummyBatchChannel = NULL;
2288 }
2289
Emilian Peev7650c122017-01-19 08:24:33 -08002290 if (mDepthChannel) {
2291 mDepthChannel = NULL;
2292 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002293 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002294
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002295 mShutterDispatcher.clear();
2296 mOutputBufferDispatcher.clear();
2297
Thierry Strudel2896d122017-02-23 19:18:03 -08002298 char is_type_value[PROPERTY_VALUE_MAX];
2299 property_get("persist.camera.is_type", is_type_value, "4");
2300 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2301
Binhao Line406f062017-05-03 14:39:44 -07002302 char property_value[PROPERTY_VALUE_MAX];
2303 property_get("persist.camera.gzoom.at", property_value, "0");
2304 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002305 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2306 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2307 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2308 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002309
2310 property_get("persist.camera.gzoom.4k", property_value, "0");
2311 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2312
Thierry Strudel3d639192016-09-09 11:52:26 -07002313 //Create metadata channel and initialize it
2314 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2315 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2316 gCamCapability[mCameraId]->color_arrangement);
2317 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2318 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002319 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002320 if (mMetadataChannel == NULL) {
2321 LOGE("failed to allocate metadata channel");
2322 rc = -ENOMEM;
2323 pthread_mutex_unlock(&mMutex);
2324 return rc;
2325 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002326 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002327 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2328 if (rc < 0) {
2329 LOGE("metadata channel initialization failed");
2330 delete mMetadataChannel;
2331 mMetadataChannel = NULL;
2332 pthread_mutex_unlock(&mMutex);
2333 return rc;
2334 }
2335
Thierry Strudel2896d122017-02-23 19:18:03 -08002336 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002338 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002339 // Keep track of preview/video streams indices.
2340 // There could be more than one preview streams, but only one video stream.
2341 int32_t video_stream_idx = -1;
2342 int32_t preview_stream_idx[streamList->num_streams];
2343 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002344 bool previewTnr[streamList->num_streams];
2345 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2346 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2347 // Loop through once to determine preview TNR conditions before creating channels.
2348 for (size_t i = 0; i < streamList->num_streams; i++) {
2349 camera3_stream_t *newStream = streamList->streams[i];
2350 uint32_t stream_usage = newStream->usage;
2351 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2352 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2353 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2354 video_stream_idx = (int32_t)i;
2355 else
2356 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2357 }
2358 }
2359 // By default, preview stream TNR is disabled.
 2360 // Enable TNR for the preview stream if all conditions below are satisfied:
2361 // 1. preview resolution == video resolution.
2362 // 2. video stream TNR is enabled.
2363 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2364 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2365 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2366 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2367 if (m_bTnrEnabled && m_bTnrVideo &&
2368 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2369 video_stream->width == preview_stream->width &&
2370 video_stream->height == preview_stream->height) {
2371 previewTnr[preview_stream_idx[i]] = true;
2372 }
2373 }
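    // Worked example (illustrative): a 1920x1080 video stream with video TNR
    // enabled plus a 1920x1080 preview stream on a rear camera running EIS 2.0
    // marks that preview index in previewTnr[], so the corresponding preview
    // channel below also gets CAM_QCOM_FEATURE_CPP_TNR.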
2374
Thierry Strudel3d639192016-09-09 11:52:26 -07002375 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2376 /* Allocate channel objects for the requested streams */
2377 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002378
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 camera3_stream_t *newStream = streamList->streams[i];
2380 uint32_t stream_usage = newStream->usage;
2381 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2382 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2383 struct camera_info *p_info = NULL;
2384 pthread_mutex_lock(&gCamLock);
2385 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2386 pthread_mutex_unlock(&gCamLock);
2387 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2388 || IS_USAGE_ZSL(newStream->usage)) &&
2389 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002392 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2393 if (bUseCommonFeatureMask)
2394 zsl_ppmask = commonFeatureMask;
2395 else
2396 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002397 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002398 if (numStreamsOnEncoder > 0)
2399 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2400 else
2401 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002403 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002404 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002405 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002406 LOGH("Input stream configured, reprocess config");
2407 } else {
2408 //for non zsl streams find out the format
2409 switch (newStream->format) {
2410 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2411 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002412 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002413 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2414 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2415 /* add additional features to pp feature mask */
2416 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2417 mStreamConfigInfo.num_streams);
2418
2419 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2420 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2421 CAM_STREAM_TYPE_VIDEO;
2422 if (m_bTnrEnabled && m_bTnrVideo) {
2423 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2424 CAM_QCOM_FEATURE_CPP_TNR;
2425 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2427 ~CAM_QCOM_FEATURE_CDS;
2428 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2430 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2431 CAM_QTI_FEATURE_PPEISCORE;
2432 }
Binhao Line406f062017-05-03 14:39:44 -07002433 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2434 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2435 CAM_QCOM_FEATURE_GOOG_ZOOM;
2436 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 } else {
2438 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2439 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002440 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2442 CAM_QCOM_FEATURE_CPP_TNR;
2443 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2445 ~CAM_QCOM_FEATURE_CDS;
2446 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002447 if(!m_bSwTnrPreview) {
2448 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2449 ~CAM_QTI_FEATURE_SW_TNR;
2450 }
Binhao Line406f062017-05-03 14:39:44 -07002451 if (is_goog_zoom_preview_enabled) {
2452 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2453 CAM_QCOM_FEATURE_GOOG_ZOOM;
2454 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002455 padding_info.width_padding = mSurfaceStridePadding;
2456 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002457 previewSize.width = (int32_t)newStream->width;
2458 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002459 }
2460 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2461 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2462 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2463 newStream->height;
2464 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2465 newStream->width;
2466 }
2467 }
2468 break;
2469 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002470 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2472 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2473 if (bUseCommonFeatureMask)
2474 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2475 commonFeatureMask;
2476 else
2477 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2478 CAM_QCOM_FEATURE_NONE;
2479 } else {
2480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2481 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2482 }
2483 break;
2484 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002485 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002486 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2487 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2488 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2490 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002491 /* Remove rotation if it is not supported
2492 for 4K LiveVideo snapshot case (online processing) */
2493 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2494 CAM_QCOM_FEATURE_ROTATION)) {
2495 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2496 &= ~CAM_QCOM_FEATURE_ROTATION;
2497 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002498 } else {
2499 if (bUseCommonFeatureMask &&
2500 isOnEncoder(maxViewfinderSize, newStream->width,
2501 newStream->height)) {
2502 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2505 }
2506 }
2507 if (isZsl) {
2508 if (zslStream) {
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2510 (int32_t)zslStream->width;
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2512 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002515 } else {
2516 LOGE("Error, No ZSL stream identified");
2517 pthread_mutex_unlock(&mMutex);
2518 return -EINVAL;
2519 }
2520 } else if (m_bIs4KVideo) {
2521 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2522 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2523 } else if (bYuv888OverrideJpeg) {
2524 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2525 (int32_t)largeYuv888Size.width;
2526 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2527 (int32_t)largeYuv888Size.height;
2528 }
2529 break;
2530 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2531 case HAL_PIXEL_FORMAT_RAW16:
2532 case HAL_PIXEL_FORMAT_RAW10:
2533 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2534 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2535 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002536 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2537 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2538 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2539 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2540 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2541 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2542 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2543 gCamCapability[mCameraId]->dt[mPDIndex];
2544 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2545 gCamCapability[mCameraId]->vc[mPDIndex];
2546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002547 break;
2548 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002549 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2551 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2552 break;
2553 }
2554 }
2555
2556 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2557 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2558 gCamCapability[mCameraId]->color_arrangement);
2559
2560 if (newStream->priv == NULL) {
2561 //New stream, construct channel
2562 switch (newStream->stream_type) {
2563 case CAMERA3_STREAM_INPUT:
2564 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2565 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2566 break;
2567 case CAMERA3_STREAM_BIDIRECTIONAL:
2568 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2569 GRALLOC_USAGE_HW_CAMERA_WRITE;
2570 break;
2571 case CAMERA3_STREAM_OUTPUT:
 2572 /* For video encoding streams, set the read/write rarely
 2573 * flags so that the buffers may be allocated un-cached */
2574 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2575 newStream->usage |=
2576 (GRALLOC_USAGE_SW_READ_RARELY |
2577 GRALLOC_USAGE_SW_WRITE_RARELY |
2578 GRALLOC_USAGE_HW_CAMERA_WRITE);
2579 else if (IS_USAGE_ZSL(newStream->usage))
2580 {
2581 LOGD("ZSL usage flag skipping");
2582 }
2583 else if (newStream == zslStream
2584 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2585 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2586 } else
2587 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2588 break;
2589 default:
2590 LOGE("Invalid stream_type %d", newStream->stream_type);
2591 break;
2592 }
2593
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002594 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002595 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2596 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2597 QCamera3ProcessingChannel *channel = NULL;
2598 switch (newStream->format) {
2599 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2600 if ((newStream->usage &
2601 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2602 (streamList->operation_mode ==
2603 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2604 ) {
2605 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2606 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002607 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002608 this,
2609 newStream,
2610 (cam_stream_type_t)
2611 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2612 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2613 mMetadataChannel,
2614 0); //heap buffers are not required for HFR video channel
2615 if (channel == NULL) {
2616 LOGE("allocation of channel failed");
2617 pthread_mutex_unlock(&mMutex);
2618 return -ENOMEM;
2619 }
2620 //channel->getNumBuffers() will return 0 here, so use
2621 //MAX_INFLIGHT_HFR_REQUESTS
2622 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2623 newStream->priv = channel;
2624 LOGI("num video buffers in HFR mode: %d",
2625 MAX_INFLIGHT_HFR_REQUESTS);
2626 } else {
2627 /* In the HFR preview-only case, copy the stream contents to create a
2628 * dummy batch channel so that sensor streaming stays in
2629 * HFR mode */
2630 if (!m_bIsVideo && (streamList->operation_mode ==
2631 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2632 mDummyBatchStream = *newStream;
2633 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002634 int bufferCount = MAX_INFLIGHT_REQUESTS;
2635 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2636 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002637 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2638 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2639 bufferCount = m_bIs4KVideo ?
2640 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2641 }
2642
Thierry Strudel2896d122017-02-23 19:18:03 -08002643 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002644 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2645 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002646 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002647 this,
2648 newStream,
2649 (cam_stream_type_t)
2650 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2651 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2652 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002653 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002654 if (channel == NULL) {
2655 LOGE("allocation of channel failed");
2656 pthread_mutex_unlock(&mMutex);
2657 return -ENOMEM;
2658 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002659 /* disable UBWC for preview, though supported,
2660 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002661 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002662 (previewSize.width == (int32_t)videoWidth)&&
2663 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002664 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002665 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002666 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002667 /* When goog_zoom is linked to the preview or video stream,
2668 * disable ubwc to the linked stream */
2669 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2670 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2671 channel->setUBWCEnabled(false);
2672 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002673 newStream->max_buffers = channel->getNumBuffers();
2674 newStream->priv = channel;
2675 }
2676 break;
2677 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2678 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2679 mChannelHandle,
2680 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002681 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002682 this,
2683 newStream,
2684 (cam_stream_type_t)
2685 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2686 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2687 mMetadataChannel);
2688 if (channel == NULL) {
2689 LOGE("allocation of YUV channel failed");
2690 pthread_mutex_unlock(&mMutex);
2691 return -ENOMEM;
2692 }
2693 newStream->max_buffers = channel->getNumBuffers();
2694 newStream->priv = channel;
2695 break;
2696 }
2697 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2698 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002699 case HAL_PIXEL_FORMAT_RAW10: {
2700 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2701 (HAL_DATASPACE_DEPTH != newStream->data_space))
2702 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002703 mRawChannel = new QCamera3RawChannel(
2704 mCameraHandle->camera_handle, mChannelHandle,
2705 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002706 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002707 this, newStream,
2708 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002709 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002710 if (mRawChannel == NULL) {
2711 LOGE("allocation of raw channel failed");
2712 pthread_mutex_unlock(&mMutex);
2713 return -ENOMEM;
2714 }
2715 newStream->max_buffers = mRawChannel->getNumBuffers();
2716 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2717 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002718 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002719 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002720 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2721 mDepthChannel = new QCamera3DepthChannel(
2722 mCameraHandle->camera_handle, mChannelHandle,
2723 mCameraHandle->ops, NULL, NULL, &padding_info,
2724 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2725 mMetadataChannel);
2726 if (NULL == mDepthChannel) {
2727 LOGE("Allocation of depth channel failed");
2728 pthread_mutex_unlock(&mMutex);
2729 return NO_MEMORY;
2730 }
2731 newStream->priv = mDepthChannel;
2732 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2733 } else {
2734 // The maximum number of in-flight live snapshot buffers is 1. This is to
2735 // mitigate frame drop issues for video snapshot: the more buffers are
2736 // allocated, the more frames are dropped.
2737 mPictureChannel = new QCamera3PicChannel(
2738 mCameraHandle->camera_handle, mChannelHandle,
2739 mCameraHandle->ops, captureResultCb,
2740 setBufferErrorStatus, &padding_info, this, newStream,
2741 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2742 m_bIs4KVideo, isZsl, mMetadataChannel,
2743 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2744 if (mPictureChannel == NULL) {
2745 LOGE("allocation of channel failed");
2746 pthread_mutex_unlock(&mMutex);
2747 return -ENOMEM;
2748 }
2749 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2750 newStream->max_buffers = mPictureChannel->getNumBuffers();
2751 mPictureChannel->overrideYuvSize(
2752 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2753 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002754 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002755 break;
2756
2757 default:
2758 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002759 pthread_mutex_unlock(&mMutex);
2760 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002761 }
2762 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2763 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2764 } else {
2765 LOGE("Error, Unknown stream type");
2766 pthread_mutex_unlock(&mMutex);
2767 return -EINVAL;
2768 }
2769
2770 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002771 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002772 // Here we only care whether it's EIS3 or not
2773 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2774 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2775 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2776 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002777 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002778 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002779 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002780 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2781 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2782 }
2783 }
2784
2785 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2786 it != mStreamInfo.end(); it++) {
2787 if ((*it)->stream == newStream) {
2788 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2789 break;
2790 }
2791 }
2792 } else {
2793 // Channel already exists for this stream
2794 // Do nothing for now
2795 }
2796 padding_info = gCamCapability[mCameraId]->padding_info;
2797
Emilian Peev7650c122017-01-19 08:24:33 -08002798 /* Do not add entries for the input and depth streams in the metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002799 * since there is no real stream associated with them
2800 */
Emilian Peev7650c122017-01-19 08:24:33 -08002801 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002802 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2803 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002805 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002806 }
2807
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002808 // Let buffer dispatcher know the configured streams.
2809 mOutputBufferDispatcher.configureStreams(streamList);
2810
Thierry Strudel2896d122017-02-23 19:18:03 -08002811 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2812 onlyRaw = false;
2813 }
2814
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002815 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002816 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002817 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 cam_analysis_info_t analysisInfo;
2819 int32_t ret = NO_ERROR;
2820 ret = mCommon.getAnalysisInfo(
2821 FALSE,
2822 analysisFeatureMask,
2823 &analysisInfo);
2824 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002825 cam_color_filter_arrangement_t analysis_color_arrangement =
2826 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2827 CAM_FILTER_ARRANGEMENT_Y :
2828 gCamCapability[mCameraId]->color_arrangement);
2829 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2830 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002831 cam_dimension_t analysisDim;
2832 analysisDim = mCommon.getMatchingDimension(previewSize,
2833 analysisInfo.analysis_recommended_res);
2834
2835 mAnalysisChannel = new QCamera3SupportChannel(
2836 mCameraHandle->camera_handle,
2837 mChannelHandle,
2838 mCameraHandle->ops,
2839 &analysisInfo.analysis_padding_info,
2840 analysisFeatureMask,
2841 CAM_STREAM_TYPE_ANALYSIS,
2842 &analysisDim,
2843 (analysisInfo.analysis_format
2844 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2845 : CAM_FORMAT_YUV_420_NV21),
2846 analysisInfo.hw_analysis_supported,
2847 gCamCapability[mCameraId]->color_arrangement,
2848 this,
2849 0); // force buffer count to 0
2850 } else {
2851 LOGW("getAnalysisInfo failed, ret = %d", ret);
2852 }
2853 if (!mAnalysisChannel) {
2854 LOGW("Analysis channel cannot be created");
2855 }
2856 }
2857
Thierry Strudel3d639192016-09-09 11:52:26 -07002858 //RAW DUMP channel
2859 if (mEnableRawDump && isRawStreamRequested == false){
2860 cam_dimension_t rawDumpSize;
2861 rawDumpSize = getMaxRawSize(mCameraId);
2862 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2863 setPAAFSupport(rawDumpFeatureMask,
2864 CAM_STREAM_TYPE_RAW,
2865 gCamCapability[mCameraId]->color_arrangement);
2866 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2867 mChannelHandle,
2868 mCameraHandle->ops,
2869 rawDumpSize,
2870 &padding_info,
2871 this, rawDumpFeatureMask);
2872 if (!mRawDumpChannel) {
2873 LOGE("Raw Dump channel cannot be created");
2874 pthread_mutex_unlock(&mMutex);
2875 return -ENOMEM;
2876 }
2877 }
2878
Thierry Strudel3d639192016-09-09 11:52:26 -07002879 if (mAnalysisChannel) {
2880 cam_analysis_info_t analysisInfo;
2881 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2882 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2883 CAM_STREAM_TYPE_ANALYSIS;
2884 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2885 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002886 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002887 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2888 &analysisInfo);
2889 if (rc != NO_ERROR) {
2890 LOGE("getAnalysisInfo failed, ret = %d", rc);
2891 pthread_mutex_unlock(&mMutex);
2892 return rc;
2893 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002894 cam_color_filter_arrangement_t analysis_color_arrangement =
2895 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2896 CAM_FILTER_ARRANGEMENT_Y :
2897 gCamCapability[mCameraId]->color_arrangement);
2898 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2899 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2900 analysis_color_arrangement);
2901
Thierry Strudel3d639192016-09-09 11:52:26 -07002902 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002903 mCommon.getMatchingDimension(previewSize,
2904 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002905 mStreamConfigInfo.num_streams++;
2906 }
2907
Thierry Strudel2896d122017-02-23 19:18:03 -08002908 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002909 cam_analysis_info_t supportInfo;
2910 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2911 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2912 setPAAFSupport(callbackFeatureMask,
2913 CAM_STREAM_TYPE_CALLBACK,
2914 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002915 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002916 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002917 if (ret != NO_ERROR) {
2918 /* Ignore the error for Mono camera
2919 * because the PAAF bit mask is only set
2920 * for CAM_STREAM_TYPE_ANALYSIS stream type
2921 */
2922 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2923 LOGW("getAnalysisInfo failed, ret = %d", ret);
2924 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002925 }
2926 mSupportChannel = new QCamera3SupportChannel(
2927 mCameraHandle->camera_handle,
2928 mChannelHandle,
2929 mCameraHandle->ops,
2930 &gCamCapability[mCameraId]->padding_info,
2931 callbackFeatureMask,
2932 CAM_STREAM_TYPE_CALLBACK,
2933 &QCamera3SupportChannel::kDim,
2934 CAM_FORMAT_YUV_420_NV21,
2935 supportInfo.hw_analysis_supported,
2936 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002937 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002938 if (!mSupportChannel) {
2939 LOGE("dummy channel cannot be created");
2940 pthread_mutex_unlock(&mMutex);
2941 return -ENOMEM;
2942 }
2943 }
2944
2945 if (mSupportChannel) {
2946 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2947 QCamera3SupportChannel::kDim;
2948 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2949 CAM_STREAM_TYPE_CALLBACK;
2950 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2951 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2952 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2953 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2954 gCamCapability[mCameraId]->color_arrangement);
2955 mStreamConfigInfo.num_streams++;
2956 }
2957
2958 if (mRawDumpChannel) {
2959 cam_dimension_t rawSize;
2960 rawSize = getMaxRawSize(mCameraId);
2961 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2962 rawSize;
2963 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2964 CAM_STREAM_TYPE_RAW;
2965 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2966 CAM_QCOM_FEATURE_NONE;
2967 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2968 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2969 gCamCapability[mCameraId]->color_arrangement);
2970 mStreamConfigInfo.num_streams++;
2971 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002972
2973 if (mHdrPlusRawSrcChannel) {
2974 cam_dimension_t rawSize;
2975 rawSize = getMaxRawSize(mCameraId);
2976 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2978 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2979 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2980 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2981 gCamCapability[mCameraId]->color_arrangement);
2982 mStreamConfigInfo.num_streams++;
2983 }
2984
Thierry Strudel3d639192016-09-09 11:52:26 -07002985 /* In HFR mode, if no video stream is added, create a dummy channel so that
2986 * the ISP can run in batch mode even in the preview-only case. This channel is
2987 * never 'start'ed (no stream-on), it is only 'initialized' */
2988 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2989 !m_bIsVideo) {
2990 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2991 setPAAFSupport(dummyFeatureMask,
2992 CAM_STREAM_TYPE_VIDEO,
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2995 mChannelHandle,
2996 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002997 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002998 this,
2999 &mDummyBatchStream,
3000 CAM_STREAM_TYPE_VIDEO,
3001 dummyFeatureMask,
3002 mMetadataChannel);
3003 if (NULL == mDummyBatchChannel) {
3004 LOGE("creation of mDummyBatchChannel failed. "
3005 "Preview will use non-HFR sensor mode");
3006 }
3007 }
3008 if (mDummyBatchChannel) {
3009 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3010 mDummyBatchStream.width;
3011 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3012 mDummyBatchStream.height;
3013 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3014 CAM_STREAM_TYPE_VIDEO;
3015 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3016 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3017 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3018 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3019 gCamCapability[mCameraId]->color_arrangement);
3020 mStreamConfigInfo.num_streams++;
3021 }
3022
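// Advertise the buffer pool bounds for this configuration to the backend.
// Note (assumption): the 0 used for 4K video appears to leave the upper
// bound to the backend, while EIS 3.0 video uses the smaller
// MAX_VIDEO_BUFFERS pool and everything else uses MAX_INFLIGHT_REQUESTS.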
3023 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3024 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003025 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003026 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003027
3028 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3029 for (pendingRequestIterator i = mPendingRequestsList.begin();
3030 i != mPendingRequestsList.end();) {
3031 i = erasePendingRequest(i);
3032 }
3033 mPendingFrameDropList.clear();
3034 // Initialize/Reset the pending buffers list
3035 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3036 req.mPendingBufferList.clear();
3037 }
3038 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003039 mExpectedInflightDuration = 0;
3040 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003041
Thierry Strudel3d639192016-09-09 11:52:26 -07003042 mCurJpegMeta.clear();
3043 //Get min frame duration for this stream configuration
3044 deriveMinFrameDuration();
3045
Chien-Yu Chenee335912017-02-09 17:53:20 -08003046 mFirstPreviewIntentSeen = false;
3047
3048 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003049 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003050 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3051 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003052 disableHdrPlusModeLocked();
3053 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003054
Thierry Strudel3d639192016-09-09 11:52:26 -07003055 // Update state
3056 mState = CONFIGURED;
3057
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003058 mFirstMetadataCallback = true;
3059
Thierry Strudel3d639192016-09-09 11:52:26 -07003060 pthread_mutex_unlock(&mMutex);
3061
3062 return rc;
3063}
3064
3065/*===========================================================================
3066 * FUNCTION : validateCaptureRequest
3067 *
3068 * DESCRIPTION: validate a capture request from camera service
3069 *
3070 * PARAMETERS :
3071 * @request : request from framework to process
3072 *
3073 * RETURN : NO_ERROR if the request is valid, BAD_VALUE otherwise
3074 *
3075 *==========================================================================*/
3076int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003077 camera3_capture_request_t *request,
3078 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003079{
3080 ssize_t idx = 0;
3081 const camera3_stream_buffer_t *b;
3082 CameraMetadata meta;
3083
3084 /* Sanity check the request */
3085 if (request == NULL) {
3086 LOGE("NULL capture request");
3087 return BAD_VALUE;
3088 }
3089
3090 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3091 /*settings cannot be null for the first request*/
3092 return BAD_VALUE;
3093 }
3094
3095 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003096 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3097 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003098 LOGE("Request %d: No output buffers provided!",
3099 frameNumber);
3100 return BAD_VALUE;
3101 }
3102 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3103 LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
3104 request->num_output_buffers, MAX_NUM_STREAMS);
3105 return BAD_VALUE;
3106 }
3107 if (request->input_buffer != NULL) {
3108 b = request->input_buffer;
3109 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3110 LOGE("Request %d: Buffer %ld: Status not OK!",
3111 frameNumber, (long)idx);
3112 return BAD_VALUE;
3113 }
3114 if (b->release_fence != -1) {
3115 LOGE("Request %d: Buffer %ld: Has a release fence!",
3116 frameNumber, (long)idx);
3117 return BAD_VALUE;
3118 }
3119 if (b->buffer == NULL) {
3120 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3121 frameNumber, (long)idx);
3122 return BAD_VALUE;
3123 }
3124 }
3125
3126 // Validate all buffers
3127 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003128 if (b == NULL) {
3129 return BAD_VALUE;
3130 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003131 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003132 QCamera3ProcessingChannel *channel =
3133 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3134 if (channel == NULL) {
3135 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3136 frameNumber, (long)idx);
3137 return BAD_VALUE;
3138 }
3139 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3140 LOGE("Request %d: Buffer %ld: Status not OK!",
3141 frameNumber, (long)idx);
3142 return BAD_VALUE;
3143 }
3144 if (b->release_fence != -1) {
3145 LOGE("Request %d: Buffer %ld: Has a release fence!",
3146 frameNumber, (long)idx);
3147 return BAD_VALUE;
3148 }
3149 if (b->buffer == NULL) {
3150 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3151 frameNumber, (long)idx);
3152 return BAD_VALUE;
3153 }
3154 if (*(b->buffer) == NULL) {
3155 LOGE("Request %d: Buffer %ld: NULL private handle!",
3156 frameNumber, (long)idx);
3157 return BAD_VALUE;
3158 }
3159 idx++;
3160 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003161 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003162 return NO_ERROR;
3163}
3164
3165/*===========================================================================
3166 * FUNCTION : deriveMinFrameDuration
3167 *
3168 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3169 * on currently configured streams.
3170 *
3171 * PARAMETERS : NONE
3172 *
3173 * RETURN : NONE
3174 *
3175 *==========================================================================*/
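/* Illustrative example (stream sizes assumed, not from this configuration):
 * with a 4032x3024 BLOB (JPEG) stream and a 1920x1080 preview and no RAW
 * stream, maxProcessedDim becomes 4032*3024, hasRaw stays false, and the
 * processed/JPEG minimum durations are taken from the picture_min_duration
 * entry whose picture_sizes_tbl dimensions match 4032x3024. */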
3176void QCamera3HardwareInterface::deriveMinFrameDuration()
3177{
3178 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003179 bool hasRaw = false;
3180
3181 mMinRawFrameDuration = 0;
3182 mMinJpegFrameDuration = 0;
3183 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003184
3185 maxJpegDim = 0;
3186 maxProcessedDim = 0;
3187 maxRawDim = 0;
3188
3189 // Figure out maximum jpeg, processed, and raw dimensions
3190 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3191 it != mStreamInfo.end(); it++) {
3192
3193 // Input stream doesn't have valid stream_type
3194 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3195 continue;
3196
3197 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3198 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3199 if (dimension > maxJpegDim)
3200 maxJpegDim = dimension;
3201 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3202 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3203 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003204 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003205 if (dimension > maxRawDim)
3206 maxRawDim = dimension;
3207 } else {
3208 if (dimension > maxProcessedDim)
3209 maxProcessedDim = dimension;
3210 }
3211 }
3212
3213 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3214 MAX_SIZES_CNT);
3215
3216 //Assume all jpeg dimensions are in processed dimensions.
3217 if (maxJpegDim > maxProcessedDim)
3218 maxProcessedDim = maxJpegDim;
3219 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003220 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003221 maxRawDim = INT32_MAX;
3222
3223 for (size_t i = 0; i < count; i++) {
3224 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3225 gCamCapability[mCameraId]->raw_dim[i].height;
3226 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3227 maxRawDim = dimension;
3228 }
3229 }
3230
3231 //Find minimum durations for processed, jpeg, and raw
3232 for (size_t i = 0; i < count; i++) {
3233 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3234 gCamCapability[mCameraId]->raw_dim[i].height) {
3235 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3236 break;
3237 }
3238 }
3239 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3240 for (size_t i = 0; i < count; i++) {
3241 if (maxProcessedDim ==
3242 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3243 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3244 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3245 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3246 break;
3247 }
3248 }
3249}
3250
3251/*===========================================================================
3252 * FUNCTION : getMinFrameDuration
3253 *
3254 * DESCRIPTION: get the minimum frame duration based on the currently derived
3255 * per-type minimum frame durations and the current request configuration.
3256 *
3257 * PARAMETERS : @request: request sent by the framework
3258 *
3259 * RETURN : min frame duration for a particular request
3260 *
3261 *==========================================================================*/
3262int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3263{
3264 bool hasJpegStream = false;
3265 bool hasRawStream = false;
3266 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3267 const camera3_stream_t *stream = request->output_buffers[i].stream;
3268 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3269 hasJpegStream = true;
3270 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3271 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3272 stream->format == HAL_PIXEL_FORMAT_RAW16)
3273 hasRawStream = true;
3274 }
3275
3276 if (!hasJpegStream)
3277 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3278 else
3279 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3280}
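/* For example, a request containing only preview/video buffers is paced by
 * MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); adding a BLOB (JPEG)
 * buffer additionally factors in mMinJpegFrameDuration. */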
3281
3282/*===========================================================================
3283 * FUNCTION : handleBuffersDuringFlushLock
3284 *
3285 * DESCRIPTION: Account for buffers returned from back-end during flush
3286 * This function is executed while mMutex is held by the caller.
3287 *
3288 * PARAMETERS :
3289 * @buffer: image buffer for the callback
3290 *
3291 * RETURN :
3292 *==========================================================================*/
3293void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3294{
3295 bool buffer_found = false;
3296 for (List<PendingBuffersInRequest>::iterator req =
3297 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3298 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3299 for (List<PendingBufferInfo>::iterator i =
3300 req->mPendingBufferList.begin();
3301 i != req->mPendingBufferList.end(); i++) {
3302 if (i->buffer == buffer->buffer) {
3303 mPendingBuffersMap.numPendingBufsAtFlush--;
3304 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3305 buffer->buffer, req->frame_number,
3306 mPendingBuffersMap.numPendingBufsAtFlush);
3307 buffer_found = true;
3308 break;
3309 }
3310 }
3311 if (buffer_found) {
3312 break;
3313 }
3314 }
3315 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3316 //signal the flush()
3317 LOGD("All buffers returned to HAL. Continue flush");
3318 pthread_cond_signal(&mBuffersCond);
3319 }
3320}
3321
Thierry Strudel3d639192016-09-09 11:52:26 -07003322/*===========================================================================
3323 * FUNCTION : handleBatchMetadata
3324 *
3325 * DESCRIPTION: Handles metadata buffer callback in batch mode
3326 *
3327 * PARAMETERS : @metadata_buf: metadata buffer
3328 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3329 * the meta buf in this method
3330 *
3331 * RETURN :
3332 *
3333 *==========================================================================*/
3334void QCamera3HardwareInterface::handleBatchMetadata(
3335 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3336{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003337 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003338
3339 if (NULL == metadata_buf) {
3340 LOGE("metadata_buf is NULL");
3341 return;
3342 }
3343 /* In batch mode, the metadata will contain the frame number and timestamp of
3344 * the last frame in the batch. E.g. a batch containing buffers from requests
3345 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3346 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3347 * multiple process_capture_results */
3348 metadata_buffer_t *metadata =
3349 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3350 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3351 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3352 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3353 uint32_t frame_number = 0, urgent_frame_number = 0;
3354 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3355 bool invalid_metadata = false;
3356 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3357 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003358 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003359
3360 int32_t *p_frame_number_valid =
3361 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3362 uint32_t *p_frame_number =
3363 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3364 int64_t *p_capture_time =
3365 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3366 int32_t *p_urgent_frame_number_valid =
3367 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3368 uint32_t *p_urgent_frame_number =
3369 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3370
3371 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3372 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3373 (NULL == p_urgent_frame_number)) {
3374 LOGE("Invalid metadata");
3375 invalid_metadata = true;
3376 } else {
3377 frame_number_valid = *p_frame_number_valid;
3378 last_frame_number = *p_frame_number;
3379 last_frame_capture_time = *p_capture_time;
3380 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3381 last_urgent_frame_number = *p_urgent_frame_number;
3382 }
3383
3384 /* In batch mode, when no video buffers are requested, set_parms are sent
3385 * for every capture_request. The difference between consecutive urgent
3386 * frame numbers and frame numbers is used to interpolate the
3387 * corresponding frame numbers and timestamps */
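/* Illustrative example (numbers assumed): if this batch metadata reports
 * last_frame_number = 8 and mPendingBatchMap maps it to first_frame_number = 5,
 * then frameNumDiff = 4 and the loop below emits results for frames 5..8,
 * spacing the inferred timestamps by NSEC_PER_SEC / mHFRVideoFps
 * (roughly 8.3 ms at 120 fps). */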
3388 pthread_mutex_lock(&mMutex);
3389 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003390 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3391 if(idx < 0) {
3392 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3393 last_urgent_frame_number);
3394 mState = ERROR;
3395 pthread_mutex_unlock(&mMutex);
3396 return;
3397 }
3398 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003399 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3400 first_urgent_frame_number;
3401
3402 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3403 urgent_frame_number_valid,
3404 first_urgent_frame_number, last_urgent_frame_number);
3405 }
3406
3407 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003408 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3409 if(idx < 0) {
3410 LOGE("Invalid frame number received: %d. Irrecoverable error",
3411 last_frame_number);
3412 mState = ERROR;
3413 pthread_mutex_unlock(&mMutex);
3414 return;
3415 }
3416 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003417 frameNumDiff = last_frame_number + 1 -
3418 first_frame_number;
3419 mPendingBatchMap.removeItem(last_frame_number);
3420
3421 LOGD("frm: valid: %d frm_num: %d - %d",
3422 frame_number_valid,
3423 first_frame_number, last_frame_number);
3424
3425 }
3426 pthread_mutex_unlock(&mMutex);
3427
3428 if (urgent_frame_number_valid || frame_number_valid) {
3429 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3430 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3431 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3432 urgentFrameNumDiff, last_urgent_frame_number);
3433 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3434 LOGE("frameNumDiff: %d frameNum: %d",
3435 frameNumDiff, last_frame_number);
3436 }
3437
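/* Each iteration below synthesizes one per-frame metadata callback from the
 * single batch metadata buffer, patching in the inferred (urgent) frame number
 * and timestamp before handing it to handleMetadataWithLock(). */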
3438 for (size_t i = 0; i < loopCount; i++) {
3439 /* handleMetadataWithLock is called even for invalid_metadata for
3440 * pipeline depth calculation */
3441 if (!invalid_metadata) {
3442 /* Infer frame number. Batch metadata contains frame number of the
3443 * last frame */
3444 if (urgent_frame_number_valid) {
3445 if (i < urgentFrameNumDiff) {
3446 urgent_frame_number =
3447 first_urgent_frame_number + i;
3448 LOGD("inferred urgent frame_number: %d",
3449 urgent_frame_number);
3450 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3451 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3452 } else {
3453 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3454 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3455 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3456 }
3457 }
3458
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (frame_number_valid) {
3462 if (i < frameNumDiff) {
3463 frame_number = first_frame_number + i;
3464 LOGD("inferred frame_number: %d", frame_number);
3465 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3466 CAM_INTF_META_FRAME_NUMBER, frame_number);
3467 } else {
3468 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3469 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3470 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3471 }
3472 }
3473
3474 if (last_frame_capture_time) {
3475 //Infer timestamp
3476 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003477 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003478 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003479 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003480 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3481 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3482 LOGD("batch capture_time: %lld, capture_time: %lld",
3483 last_frame_capture_time, capture_time);
3484 }
3485 }
3486 pthread_mutex_lock(&mMutex);
3487 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003488 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003489 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3490 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003491 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003492 pthread_mutex_unlock(&mMutex);
3493 }
3494
3495 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003496 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 mMetadataChannel->bufDone(metadata_buf);
3498 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003499 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003500 }
3501}
3502
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003503void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3504 camera3_error_msg_code_t errorCode)
3505{
3506 camera3_notify_msg_t notify_msg;
3507 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3508 notify_msg.type = CAMERA3_MSG_ERROR;
3509 notify_msg.message.error.error_code = errorCode;
3510 notify_msg.message.error.error_stream = NULL;
3511 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003512 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003513
3514 return;
3515}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003516
3517/*===========================================================================
3518 * FUNCTION : sendPartialMetadataWithLock
3519 *
3520 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3521 *
3522 * PARAMETERS : @metadata: metadata buffer
3523 * @requestIter: The iterator for the pending capture request for
3524 * which the partial result is being sent
3525 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3526 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003527 * @isJumpstartMetadata: Whether this is a partial metadata for
3528 * jumpstart, i.e. even though it doesn't map to a valid partial
3529 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003530 *
3531 * RETURN :
3532 *
3533 *==========================================================================*/
3534
3535void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3536 metadata_buffer_t *metadata,
3537 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003538 bool lastUrgentMetadataInBatch,
3539 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003540{
3541 camera3_capture_result_t result;
3542 memset(&result, 0, sizeof(camera3_capture_result_t));
3543
3544 requestIter->partial_result_cnt++;
3545
3546 // Extract 3A metadata
3547 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003548 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3549 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003550 // Populate metadata result
3551 result.frame_number = requestIter->frame_number;
3552 result.num_output_buffers = 0;
3553 result.output_buffers = NULL;
3554 result.partial_result = requestIter->partial_result_cnt;
3555
3556 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003557 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003558 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3559 // Notify HDR+ client about the partial metadata.
3560 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3561 result.partial_result == PARTIAL_RESULT_COUNT);
3562 }
3563 }
3564
3565 orchestrateResult(&result);
3566 LOGD("urgent frame_number = %u", result.frame_number);
3567 free_camera_metadata((camera_metadata_t *)result.result);
3568}
3569
Thierry Strudel3d639192016-09-09 11:52:26 -07003570/*===========================================================================
3571 * FUNCTION : handleMetadataWithLock
3572 *
3573 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3574 *
3575 * PARAMETERS : @metadata_buf: metadata buffer
3576 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3577 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003578 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3579 * last urgent metadata in a batch. Always true for non-batch mode
3580 * @lastMetadataInBatch: Boolean to indicate whether this is the
3581 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003582 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3583 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003584 *
3585 * RETURN :
3586 *
3587 *==========================================================================*/
3588void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003589 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003590 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3591 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003592{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003593 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003594 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3595 //during flush do not send metadata from this thread
3596 LOGD("not sending metadata during flush or when mState is error");
3597 if (free_and_bufdone_meta_buf) {
3598 mMetadataChannel->bufDone(metadata_buf);
3599 free(metadata_buf);
3600 }
3601 return;
3602 }
3603
3604 //not in flush
3605 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3606 int32_t frame_number_valid, urgent_frame_number_valid;
3607 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003608 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003609 nsecs_t currentSysTime;
3610
3611 int32_t *p_frame_number_valid =
3612 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3613 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3614 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003615 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003616 int32_t *p_urgent_frame_number_valid =
3617 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3618 uint32_t *p_urgent_frame_number =
3619 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3620 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3621 metadata) {
3622 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3623 *p_frame_number_valid, *p_frame_number);
3624 }
3625
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003626 camera_metadata_t *resultMetadata = nullptr;
3627
Thierry Strudel3d639192016-09-09 11:52:26 -07003628 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3629 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3630 LOGE("Invalid metadata");
3631 if (free_and_bufdone_meta_buf) {
3632 mMetadataChannel->bufDone(metadata_buf);
3633 free(metadata_buf);
3634 }
3635 goto done_metadata;
3636 }
3637 frame_number_valid = *p_frame_number_valid;
3638 frame_number = *p_frame_number;
3639 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003640 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003641 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3642 urgent_frame_number = *p_urgent_frame_number;
3643 currentSysTime = systemTime(CLOCK_MONOTONIC);
3644
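/* If the sensor timestamp is not calibrated to the clock used here, estimate
 * the CLOCK_BOOTTIME to CLOCK_MONOTONIC offset by bracketing a BOOTTIME read
 * between two MONOTONIC reads (best of three tries, keeping the sample with
 * the smallest bracketing gap) and subtract it from capture_time. That the
 * raw timestamp is BOOTTIME-based is an assumption inferred from this
 * adjustment rather than stated explicitly. */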
Jason Lee603176d2017-05-31 11:43:27 -07003645 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3646 const int tries = 3;
3647 nsecs_t bestGap, measured;
3648 for (int i = 0; i < tries; ++i) {
3649 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3650 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3651 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3652 const nsecs_t gap = tmono2 - tmono;
3653 if (i == 0 || gap < bestGap) {
3654 bestGap = gap;
3655 measured = tbase - ((tmono + tmono2) >> 1);
3656 }
3657 }
3658 capture_time -= measured;
3659 }
3660
Thierry Strudel3d639192016-09-09 11:52:26 -07003661 // Detect if buffers from any requests are overdue
3662 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 int64_t timeout;
3664 {
3665 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3666 // If there is a pending HDR+ request, the following requests may be blocked until the
3667 // HDR+ request is done. So allow a longer timeout.
3668 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3669 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003670 if (timeout < mExpectedInflightDuration) {
3671 timeout = mExpectedInflightDuration;
3672 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003673 }
3674
3675 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003676 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003677 assert(missed.stream->priv);
3678 if (missed.stream->priv) {
3679 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3680 assert(ch->mStreams[0]);
3681 if (ch->mStreams[0]) {
3682 LOGE("Cancel missing frame = %d, buffer = %p,"
3683 "stream type = %d, stream format = %d",
3684 req.frame_number, missed.buffer,
3685 ch->mStreams[0]->getMyType(), missed.stream->format);
3686 ch->timeoutFrame(req.frame_number);
3687 }
3688 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003689 }
3690 }
3691 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003692 //For the very first metadata callback, regardless of whether it contains a
3693 //valid frame number, send the partial metadata for the jumpstarting requests.
3694 //Note that this has to be done even if the metadata doesn't contain a valid
3695 //urgent frame number, because in the case where only 1 request is ever submitted
3696 //to the HAL, there won't be a subsequent valid urgent frame number.
3697 if (mFirstMetadataCallback) {
3698 for (pendingRequestIterator i =
3699 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3700 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003701 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3702 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003703 }
3704 }
3705 mFirstMetadataCallback = false;
3706 }
3707
Thierry Strudel3d639192016-09-09 11:52:26 -07003708 //Partial result on process_capture_result for timestamp
3709 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003710 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003711
3712 //Received an urgent frame number, handle it
3713 //using partial results
3714 for (pendingRequestIterator i =
3715 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3716 LOGD("Iterator Frame = %d urgent frame = %d",
3717 i->frame_number, urgent_frame_number);
3718
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003719 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003720 (i->partial_result_cnt == 0)) {
3721 LOGE("Error: HAL missed urgent metadata for frame number %d",
3722 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003723 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003724 }
3725
3726 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003728 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3729 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003730 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3731 // Instant AEC settled for this frame.
3732 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3733 mInstantAECSettledFrameNumber = urgent_frame_number;
3734 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003735 break;
3736 }
3737 }
3738 }
3739
3740 if (!frame_number_valid) {
3741 LOGD("Not a valid normal frame number, used as SOF only");
3742 if (free_and_bufdone_meta_buf) {
3743 mMetadataChannel->bufDone(metadata_buf);
3744 free(metadata_buf);
3745 }
3746 goto done_metadata;
3747 }
3748 LOGH("valid frame_number = %u, capture_time = %lld",
3749 frame_number, capture_time);
3750
Emilian Peev4e0fe952017-06-30 12:40:09 -07003751 handleDepthDataLocked(metadata->depth_data, frame_number,
3752 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003753
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003754 // Check whether any stream buffer corresponding to this frame is dropped or not.
3755 // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3756 // Also, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3757 for (auto & pendingRequest : mPendingRequestsList) {
3758 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3759 mInstantAECSettledFrameNumber)) {
3760 camera3_notify_msg_t notify_msg = {};
3761 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003762 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003763 QCamera3ProcessingChannel *channel =
3764 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003765 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003766 if (p_cam_frame_drop) {
3767 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003768 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003769 // Got the stream ID for drop frame.
3770 dropFrame = true;
3771 break;
3772 }
3773 }
3774 } else {
3775 // This is instant AEC case.
3776 // For instant AEC, drop the stream until AEC has settled.
3777 dropFrame = true;
3778 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003779
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 if (dropFrame) {
3781 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3782 if (p_cam_frame_drop) {
3783 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003784 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003785 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003786 } else {
3787 // For instant AEC, inform frame drop and frame number
3788 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3789 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003790 pendingRequest.frame_number, streamID,
3791 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003792 }
3793 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003794 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003795 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003797 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003798 if (p_cam_frame_drop) {
3799 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003800 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003801 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003802 } else {
3803 // For instant AEC, inform frame drop and frame number
3804 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3805 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003806 pendingRequest.frame_number, streamID,
3807 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003808 }
3809 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003811 PendingFrameDrop.stream_ID = streamID;
3812 // Add the Frame drop info to mPendingFrameDropList
3813 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003815 }
3816 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003818
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 for (auto & pendingRequest : mPendingRequestsList) {
3820 // Find the pending request with the frame number.
3821 if (pendingRequest.frame_number == frame_number) {
3822 // Update the sensor timestamp.
3823 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003824
Thierry Strudel3d639192016-09-09 11:52:26 -07003825
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003826 /* Set the timestamp in the display metadata so that clients aware of
3827 private_handle, such as VT, can use these unmodified timestamps.
3828 The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003829 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003830
Thierry Strudel3d639192016-09-09 11:52:26 -07003831 // Find channel requiring metadata, meaning internal offline postprocess
3832 // is needed.
3833 //TODO: for now, we don't support two streams requiring metadata at the same time.
3834            // (because we are not making copies, and the metadata buffer is not reference counted).
3835 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3837 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003838 if (iter->need_metadata) {
3839 internalPproc = true;
3840 QCamera3ProcessingChannel *channel =
3841 (QCamera3ProcessingChannel *)iter->stream->priv;
3842 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003843 if(p_is_metabuf_queued != NULL) {
3844 *p_is_metabuf_queued = true;
3845 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003846 break;
3847 }
3848 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003849 for (auto itr = pendingRequest.internalRequestList.begin();
3850 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003851 if (itr->need_metadata) {
3852 internalPproc = true;
3853 QCamera3ProcessingChannel *channel =
3854 (QCamera3ProcessingChannel *)itr->stream->priv;
3855 channel->queueReprocMetadata(metadata_buf);
3856 break;
3857 }
3858 }
3859
Thierry Strudel54dc9782017-02-15 12:12:10 -08003860 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003861
3862 bool *enableZsl = nullptr;
3863 if (gExposeEnableZslKey) {
3864 enableZsl = &pendingRequest.enableZsl;
3865 }
3866
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003867 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003868 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003869 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003870
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003871 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003872
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003873 if (pendingRequest.blob_request) {
3874 //Dump tuning metadata if enabled and available
3875 char prop[PROPERTY_VALUE_MAX];
3876 memset(prop, 0, sizeof(prop));
3877 property_get("persist.camera.dumpmetadata", prop, "0");
3878 int32_t enabled = atoi(prop);
3879 if (enabled && metadata->is_tuning_params_valid) {
3880 dumpMetadataToFile(metadata->tuning_params,
3881 mMetaFrameCount,
3882 enabled,
3883 "Snapshot",
3884 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003885 }
3886 }
3887
3888 if (!internalPproc) {
3889 LOGD("couldn't find need_metadata for this metadata");
3890 // Return metadata buffer
3891 if (free_and_bufdone_meta_buf) {
3892 mMetadataChannel->bufDone(metadata_buf);
3893 free(metadata_buf);
3894 }
3895 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003896
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003897 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003898 }
3899 }
3900
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003901 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3902
3903 // Try to send out capture result metadata.
3904 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003905 return;
3906
Thierry Strudel3d639192016-09-09 11:52:26 -07003907done_metadata:
3908 for (pendingRequestIterator i = mPendingRequestsList.begin();
3909 i != mPendingRequestsList.end() ;i++) {
3910 i->pipeline_depth++;
3911 }
3912 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3913 unblockRequestIfNecessary();
3914}
3915
3916/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003917 * FUNCTION   : handleDepthDataLocked
3918 *
3919 * DESCRIPTION: Handles incoming depth data
3920 *
3921 * PARAMETERS : @depthData : Depth data
3922 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003923 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003924 *
3925 * RETURN :
3926 *
3927 *==========================================================================*/
3928void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003929 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003930 uint32_t currentFrameNumber;
3931 buffer_handle_t *depthBuffer;
3932
3933 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003934 return;
3935 }
3936
3937 camera3_stream_buffer_t resultBuffer =
3938 {.acquire_fence = -1,
3939 .release_fence = -1,
3940 .status = CAMERA3_BUFFER_STATUS_OK,
3941 .buffer = nullptr,
3942 .stream = mDepthChannel->getStream()};
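    // Walk the depth channel's pending buffers in order: frames older than the incoming
    // one are returned as buffer errors, and the matching frame is populated with the
    // depth data (or returned as an error if the data is not valid).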
Emilian Peev7650c122017-01-19 08:24:33 -08003943 do {
3944 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3945 if (nullptr == depthBuffer) {
3946 break;
3947 }
3948
Emilian Peev7650c122017-01-19 08:24:33 -08003949 resultBuffer.buffer = depthBuffer;
3950 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003951 if (valid) {
3952 int32_t rc = mDepthChannel->populateDepthData(depthData,
3953 frameNumber);
3954 if (NO_ERROR != rc) {
3955 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3956 } else {
3957 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3958 }
Emilian Peev7650c122017-01-19 08:24:33 -08003959 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003960 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003961 }
3962 } else if (currentFrameNumber > frameNumber) {
3963 break;
3964 } else {
3965 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3966 {{currentFrameNumber, mDepthChannel->getStream(),
3967 CAMERA3_MSG_ERROR_BUFFER}}};
3968 orchestrateNotify(&notify_msg);
3969
3970 LOGE("Depth buffer for frame number: %d is missing "
3971 "returning back!", currentFrameNumber);
3972 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3973 }
3974 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003975 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003976 } while (currentFrameNumber < frameNumber);
3977}
3978
3979/*===========================================================================
3980 * FUNCTION : notifyErrorFoPendingDepthData
3981 *
3982 * DESCRIPTION: Returns error for any pending depth buffers
3983 *
3984 * PARAMETERS : depthCh - depth channel that needs to get flushed
3985 *
3986 * RETURN :
3987 *
3988 *==========================================================================*/
3989void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3990 QCamera3DepthChannel *depthCh) {
3991 uint32_t currentFrameNumber;
3992 buffer_handle_t *depthBuffer;
3993
3994 if (nullptr == depthCh) {
3995 return;
3996 }
3997
3998 camera3_notify_msg_t notify_msg =
3999 {.type = CAMERA3_MSG_ERROR,
4000 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4001 camera3_stream_buffer_t resultBuffer =
4002 {.acquire_fence = -1,
4003 .release_fence = -1,
4004 .buffer = nullptr,
4005 .stream = depthCh->getStream(),
4006 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004007
4008 while (nullptr !=
4009 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4010 depthCh->unmapBuffer(currentFrameNumber);
4011
4012 notify_msg.message.error.frame_number = currentFrameNumber;
4013 orchestrateNotify(&notify_msg);
4014
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004015 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004016    }
4017}
4018
4019/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004020 * FUNCTION : hdrPlusPerfLock
4021 *
4022 * DESCRIPTION: perf lock for HDR+ using custom intent
4023 *
4024 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4025 *
4026 * RETURN : None
4027 *
4028 *==========================================================================*/
4029void QCamera3HardwareInterface::hdrPlusPerfLock(
4030 mm_camera_super_buf_t *metadata_buf)
4031{
4032 if (NULL == metadata_buf) {
4033 LOGE("metadata_buf is NULL");
4034 return;
4035 }
4036 metadata_buffer_t *metadata =
4037 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4038 int32_t *p_frame_number_valid =
4039 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4040 uint32_t *p_frame_number =
4041 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4042
4043 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4044 LOGE("%s: Invalid metadata", __func__);
4045 return;
4046 }
4047
Wei Wang01385482017-08-03 10:49:34 -07004048 //acquire perf lock for 2 secs after the last HDR frame is captured
4049 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004050 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4051 if ((p_frame_number != NULL) &&
4052 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004053 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004054 }
4055 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004056}
4057
4058/*===========================================================================
4059 * FUNCTION : handleInputBufferWithLock
4060 *
4061 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4062 *
4063 * PARAMETERS : @frame_number: frame number of the input buffer
4064 *
4065 * RETURN :
4066 *
4067 *==========================================================================*/
4068void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4069{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004070 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 pendingRequestIterator i = mPendingRequestsList.begin();
4072 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4073 i++;
4074 }
4075 if (i != mPendingRequestsList.end() && i->input_buffer) {
4076 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004077 CameraMetadata settings;
4078 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4079 if(i->settings) {
4080 settings = i->settings;
4081 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4082 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004084 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004085 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004086 } else {
4087 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 }
4089
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004090 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4091 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4092 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004093
4094 camera3_capture_result result;
4095 memset(&result, 0, sizeof(camera3_capture_result));
4096 result.frame_number = frame_number;
4097 result.result = i->settings;
4098 result.input_buffer = i->input_buffer;
4099 result.partial_result = PARTIAL_RESULT_COUNT;
4100
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004101 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004102 LOGD("Input request metadata and input buffer frame_number = %u",
4103 i->frame_number);
4104 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004105
4106 // Dispatch result metadata that may be just unblocked by this reprocess result.
4107 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004108 } else {
4109 LOGE("Could not find input request for frame number %d", frame_number);
4110 }
4111}
4112
4113/*===========================================================================
4114 * FUNCTION : handleBufferWithLock
4115 *
4116 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4117 *
4118 * PARAMETERS : @buffer: image buffer for the callback
4119 * @frame_number: frame number of the image buffer
4120 *
4121 * RETURN :
4122 *
4123 *==========================================================================*/
4124void QCamera3HardwareInterface::handleBufferWithLock(
4125 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4126{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004127 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004128
4129 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4130 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4131 }
4132
Thierry Strudel3d639192016-09-09 11:52:26 -07004133 /* Nothing to be done during error state */
4134 if ((ERROR == mState) || (DEINIT == mState)) {
4135 return;
4136 }
4137 if (mFlushPerf) {
4138 handleBuffersDuringFlushLock(buffer);
4139 return;
4140 }
4141 //not in flush
4142 // If the frame number doesn't exist in the pending request list,
4143 // directly send the buffer to the frameworks, and update pending buffers map
4144 // Otherwise, book-keep the buffer.
4145 pendingRequestIterator i = mPendingRequestsList.begin();
4146 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4147 i++;
4148 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004149
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004150 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004151 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004152 // For a reprocessing request, try to send out result metadata.
4153 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004154 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004155 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004156
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004157 // Check if this frame was dropped.
4158 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4159 m != mPendingFrameDropList.end(); m++) {
4160 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4161 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4162 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4163 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4164 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4165 frame_number, streamID);
4166 m = mPendingFrameDropList.erase(m);
4167 break;
4168 }
4169 }
4170
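    // Merge in any error status already recorded for this buffer in the pending buffers map.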
4171 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4172 LOGH("result frame_number = %d, buffer = %p",
4173 frame_number, buffer->buffer);
4174
4175 mPendingBuffersMap.removeBuf(buffer->buffer);
4176 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4177
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004178 if (mPreviewStarted == false) {
4179 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4180 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004181 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4182
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004183 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4184 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4185 mPreviewStarted = true;
4186
4187 // Set power hint for preview
4188 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4189 }
4190 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004191}
4192
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004193void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004194 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004195{
4196 // Find the pending request for this result metadata.
4197 auto requestIter = mPendingRequestsList.begin();
4198 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4199 requestIter++;
4200 }
4201
4202 if (requestIter == mPendingRequestsList.end()) {
4203 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4204 return;
4205 }
4206
4207 // Update the result metadata
4208 requestIter->resultMetadata = resultMetadata;
4209
4210 // Check what type of request this is.
4211 bool liveRequest = false;
4212 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004213 // HDR+ request doesn't have partial results.
4214 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004215 } else if (requestIter->input_buffer != nullptr) {
4216 // Reprocessing request result is the same as settings.
4217 requestIter->resultMetadata = requestIter->settings;
4218 // Reprocessing request doesn't have partial results.
4219 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4220 } else {
4221 liveRequest = true;
4222 requestIter->partial_result_cnt++;
4223 mPendingLiveRequest--;
4224
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004225 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004226 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004227 // For a live request, send the metadata to HDR+ client.
4228 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4229 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4230 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4231 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004232 }
4233 }
4234
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004235    // Remove lens shading map if it's not requested.
4236 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4237 CameraMetadata metadata;
4238 metadata.acquire(resultMetadata);
4239 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4240 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4241 &requestIter->requestedLensShadingMapMode, 1);
4242
4243 requestIter->resultMetadata = metadata.release();
4244 }
4245
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004246 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4247}
4248
4249void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4250 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004251 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4252 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004253 bool readyToSend = true;
4254
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004255 // Iterate through the pending requests to send out result metadata that are ready. Also if
4256 // this result metadata belongs to a live request, notify errors for previous live requests
4257 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004258 auto iter = mPendingRequestsList.begin();
4259 while (iter != mPendingRequestsList.end()) {
4260 // Check if current pending request is ready. If it's not ready, the following pending
4261 // requests are also not ready.
4262 if (readyToSend && iter->resultMetadata == nullptr) {
4263 readyToSend = false;
4264 }
4265
4266 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4267
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 camera3_capture_result_t result = {};
4269 result.frame_number = iter->frame_number;
4270 result.result = iter->resultMetadata;
4271 result.partial_result = iter->partial_result_cnt;
4272
4273 // If this pending buffer has result metadata, we may be able to send out shutter callback
4274 // and result metadata.
4275 if (iter->resultMetadata != nullptr) {
4276 if (!readyToSend) {
4277 // If any of the previous pending request is not ready, this pending request is
4278 // also not ready to send in order to keep shutter callbacks and result metadata
4279 // in order.
4280 iter++;
4281 continue;
4282 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004283 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004284 // If the result metadata belongs to a live request, notify errors for previous pending
4285 // live requests.
4286 mPendingLiveRequest--;
4287
4288 CameraMetadata dummyMetadata;
4289 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4290 result.result = dummyMetadata.release();
4291
4292 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004293
4294 // partial_result should be PARTIAL_RESULT_CNT in case of
4295 // ERROR_RESULT.
4296 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4297 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004298 } else {
4299 iter++;
4300 continue;
4301 }
4302
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004303 result.output_buffers = nullptr;
4304 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004305 orchestrateResult(&result);
4306
4307 // For reprocessing, result metadata is the same as settings so do not free it here to
4308 // avoid double free.
4309 if (result.result != iter->settings) {
4310 free_camera_metadata((camera_metadata_t *)result.result);
4311 }
4312 iter->resultMetadata = nullptr;
4313 iter = erasePendingRequest(iter);
4314 }
4315
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004316 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004317 for (auto &iter : mPendingRequestsList) {
4318 // Increment pipeline depth for the following pending requests.
4319 if (iter.frame_number > frameNumber) {
4320 iter.pipeline_depth++;
4321 }
4322 }
4323 }
4324
4325 unblockRequestIfNecessary();
4326}
4327
Thierry Strudel3d639192016-09-09 11:52:26 -07004328/*===========================================================================
4329 * FUNCTION : unblockRequestIfNecessary
4330 *
4331 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4332 * that mMutex is held when this function is called.
4333 *
4334 * PARAMETERS :
4335 *
4336 * RETURN :
4337 *
4338 *==========================================================================*/
4339void QCamera3HardwareInterface::unblockRequestIfNecessary()
4340{
4341 // Unblock process_capture_request
4342 pthread_cond_signal(&mRequestCond);
4343}
4344
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004345/*===========================================================================
4346 * FUNCTION : isHdrSnapshotRequest
4347 *
4348 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4349 *
4350 * PARAMETERS : camera3 request structure
4351 *
4352 * RETURN : boolean decision variable
4353 *
4354 *==========================================================================*/
4355bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4356{
4357 if (request == NULL) {
4358 LOGE("Invalid request handle");
4359 assert(0);
4360 return false;
4361 }
4362
4363 if (!mForceHdrSnapshot) {
4364 CameraMetadata frame_settings;
4365 frame_settings = request->settings;
4366
4367 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4368 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4369 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4370 return false;
4371 }
4372 } else {
4373 return false;
4374 }
4375
4376 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4377 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4378 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4379 return false;
4380 }
4381 } else {
4382 return false;
4383 }
4384 }
4385
4386 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4387 if (request->output_buffers[i].stream->format
4388 == HAL_PIXEL_FORMAT_BLOB) {
4389 return true;
4390 }
4391 }
4392
4393 return false;
4394}
4395/*===========================================================================
4396 * FUNCTION : orchestrateRequest
4397 *
4398 * DESCRIPTION: Orchestrates a capture request from camera service
4399 *
4400 * PARAMETERS :
4401 * @request : request from framework to process
4402 *
4403 * RETURN : Error status codes
4404 *
4405 *==========================================================================*/
4406int32_t QCamera3HardwareInterface::orchestrateRequest(
4407 camera3_capture_request_t *request)
4408{
4409
4410 uint32_t originalFrameNumber = request->frame_number;
4411 uint32_t originalOutputCount = request->num_output_buffers;
4412 const camera_metadata_t *original_settings = request->settings;
4413 List<InternalRequest> internallyRequestedStreams;
4414 List<InternalRequest> emptyInternalList;
4415
4416 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4417 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
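        // The HDR snapshot is expanded into an internally orchestrated exposure bracket
        // with AE locked: a settling/-2x capture, a 0x capture and a 2x capture (see the
        // per-step comments below), each submitted under its own internal frame number.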
4418 uint32_t internalFrameNumber;
4419 CameraMetadata modified_meta;
4420
4421
4422 /* Add Blob channel to list of internally requested streams */
4423 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4424 if (request->output_buffers[i].stream->format
4425 == HAL_PIXEL_FORMAT_BLOB) {
4426 InternalRequest streamRequested;
4427 streamRequested.meteringOnly = 1;
4428 streamRequested.need_metadata = 0;
4429 streamRequested.stream = request->output_buffers[i].stream;
4430 internallyRequestedStreams.push_back(streamRequested);
4431 }
4432 }
4433 request->num_output_buffers = 0;
4434 auto itr = internallyRequestedStreams.begin();
4435
4436 /* Modify setting to set compensation */
4437 modified_meta = request->settings;
4438 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4439 uint8_t aeLock = 1;
4440 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4441 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4442 camera_metadata_t *modified_settings = modified_meta.release();
4443 request->settings = modified_settings;
4444
4445 /* Capture Settling & -2x frame */
4446 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4447 request->frame_number = internalFrameNumber;
4448 processCaptureRequest(request, internallyRequestedStreams);
4449
4450 request->num_output_buffers = originalOutputCount;
4451 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4452 request->frame_number = internalFrameNumber;
4453 processCaptureRequest(request, emptyInternalList);
4454 request->num_output_buffers = 0;
4455
4456 modified_meta = modified_settings;
4457 expCompensation = 0;
4458 aeLock = 1;
4459 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4460 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4461 modified_settings = modified_meta.release();
4462 request->settings = modified_settings;
4463
4464 /* Capture Settling & 0X frame */
4465
4466 itr = internallyRequestedStreams.begin();
4467 if (itr == internallyRequestedStreams.end()) {
4468 LOGE("Error Internally Requested Stream list is empty");
4469 assert(0);
4470 } else {
4471 itr->need_metadata = 0;
4472 itr->meteringOnly = 1;
4473 }
4474
4475 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4476 request->frame_number = internalFrameNumber;
4477 processCaptureRequest(request, internallyRequestedStreams);
4478
4479 itr = internallyRequestedStreams.begin();
4480 if (itr == internallyRequestedStreams.end()) {
4481 ALOGE("Error Internally Requested Stream list is empty");
4482 assert(0);
4483 } else {
4484 itr->need_metadata = 1;
4485 itr->meteringOnly = 0;
4486 }
4487
4488 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4489 request->frame_number = internalFrameNumber;
4490 processCaptureRequest(request, internallyRequestedStreams);
4491
4492 /* Capture 2X frame*/
4493 modified_meta = modified_settings;
4494 expCompensation = GB_HDR_2X_STEP_EV;
4495 aeLock = 1;
4496 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4497 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4498 modified_settings = modified_meta.release();
4499 request->settings = modified_settings;
4500
4501 itr = internallyRequestedStreams.begin();
4502 if (itr == internallyRequestedStreams.end()) {
4503 ALOGE("Error Internally Requested Stream list is empty");
4504 assert(0);
4505 } else {
4506 itr->need_metadata = 0;
4507 itr->meteringOnly = 1;
4508 }
4509 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4510 request->frame_number = internalFrameNumber;
4511 processCaptureRequest(request, internallyRequestedStreams);
4512
4513 itr = internallyRequestedStreams.begin();
4514 if (itr == internallyRequestedStreams.end()) {
4515 ALOGE("Error Internally Requested Stream list is empty");
4516 assert(0);
4517 } else {
4518 itr->need_metadata = 1;
4519 itr->meteringOnly = 0;
4520 }
4521
4522 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4523 request->frame_number = internalFrameNumber;
4524 processCaptureRequest(request, internallyRequestedStreams);
4525
4526
4527 /* Capture 2X on original streaming config*/
4528 internallyRequestedStreams.clear();
4529
4530 /* Restore original settings pointer */
4531 request->settings = original_settings;
4532 } else {
4533 uint32_t internalFrameNumber;
4534 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4535 request->frame_number = internalFrameNumber;
4536 return processCaptureRequest(request, internallyRequestedStreams);
4537 }
4538
4539 return NO_ERROR;
4540}
4541
4542/*===========================================================================
4543 * FUNCTION : orchestrateResult
4544 *
4545 * DESCRIPTION: Orchestrates a capture result to camera service
4546 *
4547 * PARAMETERS :
4548 *   @result : capture result to be sent to camera service
4549 *
4550 * RETURN :
4551 *
4552 *==========================================================================*/
4553void QCamera3HardwareInterface::orchestrateResult(
4554 camera3_capture_result_t *result)
4555{
4556 uint32_t frameworkFrameNumber;
4557 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4558 frameworkFrameNumber);
4559 if (rc != NO_ERROR) {
4560 LOGE("Cannot find translated frameworkFrameNumber");
4561 assert(0);
4562 } else {
4563 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004564 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004565 } else {
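            // Rewrite ANDROID_SYNC_FRAME_NUMBER (when present) so it carries the
            // framework-visible frame number instead of the HAL-internal one.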
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004566 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004567 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4568 camera_metadata_entry_t entry;
4569 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4570 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004571 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004572 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4573 if (ret != OK)
4574 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004575 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004576 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004577 result->frame_number = frameworkFrameNumber;
4578 mCallbackOps->process_capture_result(mCallbackOps, result);
4579 }
4580 }
4581}
4582
4583/*===========================================================================
4584 * FUNCTION : orchestrateNotify
4585 *
4586 * DESCRIPTION: Orchestrates a notify to camera service
4587 *
4588 * PARAMETERS :
4589 *   @notify_msg : notify message to be sent to camera service
4590 *
4591 * RETURN :
4592 *
4593 *==========================================================================*/
4594void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4595{
4596 uint32_t frameworkFrameNumber;
4597 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004598 int32_t rc = NO_ERROR;
4599
4600 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004602
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004603 if (rc != NO_ERROR) {
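        // A device-level error is not tied to a translatable request, so report it
        // against frame number 0 instead of dropping the notification.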
Thierry Strudel2896d122017-02-23 19:18:03 -08004604 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4605 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4606 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004607 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004608 LOGE("Cannot find translated frameworkFrameNumber");
4609 assert(0);
4610 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004611 }
4612 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004613
4614 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4615 LOGD("Internal Request drop the notifyCb");
4616 } else {
4617 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4618 mCallbackOps->notify(mCallbackOps, notify_msg);
4619 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004620}
4621
4622/*===========================================================================
4623 * FUNCTION : FrameNumberRegistry
4624 *
4625 * DESCRIPTION: Constructor
4626 *
4627 * PARAMETERS :
4628 *
4629 * RETURN :
4630 *
4631 *==========================================================================*/
4632FrameNumberRegistry::FrameNumberRegistry()
4633{
4634 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4635}
4636
4637/*===========================================================================
4638 * FUNCTION : ~FrameNumberRegistry
4639 *
4640 * DESCRIPTION: Destructor
4641 *
4642 * PARAMETERS :
4643 *
4644 * RETURN :
4645 *
4646 *==========================================================================*/
4647FrameNumberRegistry::~FrameNumberRegistry()
4648{
4649}
4650
4651/*===========================================================================
4652 * FUNCTION : PurgeOldEntriesLocked
4653 *
4654 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4655 *
4656 * PARAMETERS :
4657 *
4658 * RETURN : NONE
4659 *
4660 *==========================================================================*/
4661void FrameNumberRegistry::purgeOldEntriesLocked()
4662{
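    // Erase entries from the front of the register while they fall outside the LRU
    // window; stop at the first entry that is still within FRAME_REGISTER_LRU_SIZE.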
4663 while (_register.begin() != _register.end()) {
4664 auto itr = _register.begin();
4665 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4666 _register.erase(itr);
4667 } else {
4668 return;
4669 }
4670 }
4671}
4672
4673/*===========================================================================
4674 * FUNCTION : allocStoreInternalFrameNumber
4675 *
4676 * DESCRIPTION: Method to note down a framework request and associate a new
4677 * internal request number against it
4678 *
4679 * PARAMETERS :
4680 *   @frameworkFrameNumber: Identifier given by framework
4681 *   @internalFrameNumber : Output parameter which will hold the newly generated
4682 *                          internal frame number
4683 *
4684 * RETURN : Error code
4685 *
4686 *==========================================================================*/
4687int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4688 uint32_t &internalFrameNumber)
4689{
4690 Mutex::Autolock lock(mRegistryLock);
4691 internalFrameNumber = _nextFreeInternalNumber++;
4692 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4693 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4694 purgeOldEntriesLocked();
4695 return NO_ERROR;
4696}
4697
4698/*===========================================================================
4699 * FUNCTION : generateStoreInternalFrameNumber
4700 *
4701 * DESCRIPTION: Method to associate a new internal request number independent
4702 *              of any association with framework requests
4703 *
4704 * PARAMETERS :
4705 *   @internalFrameNumber: Output parameter which will hold the newly generated
4706 *                         internal frame number
4707 *
4708 * RETURN : Error code
4709 *
4710 *==========================================================================*/
4711int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4712{
4713 Mutex::Autolock lock(mRegistryLock);
4714 internalFrameNumber = _nextFreeInternalNumber++;
4715 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4716 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4717 purgeOldEntriesLocked();
4718 return NO_ERROR;
4719}
4720
4721/*===========================================================================
4722 * FUNCTION : getFrameworkFrameNumber
4723 *
4724 * DESCRIPTION: Method to query the framework framenumber given an internal #
4725 *
4726 * PARAMETERS :
4727 *   @internalFrameNumber : Internal reference
4728 *   @frameworkFrameNumber: Output parameter holding the framework frame number
4729 *
4730 * RETURN : Error code
4731 *
4732 *==========================================================================*/
4733int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4734 uint32_t &frameworkFrameNumber)
4735{
4736 Mutex::Autolock lock(mRegistryLock);
4737 auto itr = _register.find(internalFrameNumber);
4738 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004739 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004740 return -ENOENT;
4741 }
4742
4743 frameworkFrameNumber = itr->second;
4744 purgeOldEntriesLocked();
4745 return NO_ERROR;
4746}
Thierry Strudel3d639192016-09-09 11:52:26 -07004747
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004748status_t QCamera3HardwareInterface::fillPbStreamConfig(
4749 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4750 QCamera3Channel *channel, uint32_t streamIndex) {
4751 if (config == nullptr) {
4752 LOGE("%s: config is null", __FUNCTION__);
4753 return BAD_VALUE;
4754 }
4755
4756 if (channel == nullptr) {
4757 LOGE("%s: channel is null", __FUNCTION__);
4758 return BAD_VALUE;
4759 }
4760
4761 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4762 if (stream == nullptr) {
4763 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4764 return NAME_NOT_FOUND;
4765 }
4766
4767 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4768 if (streamInfo == nullptr) {
4769 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4770 return NAME_NOT_FOUND;
4771 }
4772
4773 config->id = pbStreamId;
4774 config->image.width = streamInfo->dim.width;
4775 config->image.height = streamInfo->dim.height;
4776 config->image.padding = 0;
4777 config->image.format = pbStreamFormat;
4778
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004779 uint32_t totalPlaneSize = 0;
4780
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004781 // Fill plane information.
4782 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4783 pbcamera::PlaneConfiguration plane;
4784 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4785 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4786 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004787
4788 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004789 }
4790
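    // Report any bytes in the frame beyond the summed plane sizes as padding.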
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004791 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004792 return OK;
4793}
4794
Thierry Strudel3d639192016-09-09 11:52:26 -07004795/*===========================================================================
4796 * FUNCTION : processCaptureRequest
4797 *
4798 * DESCRIPTION: process a capture request from camera service
4799 *
4800 * PARAMETERS :
4801 * @request : request from framework to process
4802 *
4803 * RETURN :
4804 *
4805 *==========================================================================*/
4806int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004807 camera3_capture_request_t *request,
4808 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004809{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004810 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004811 int rc = NO_ERROR;
4812 int32_t request_id;
4813 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 bool isVidBufRequested = false;
4815 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004816 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004817
4818 pthread_mutex_lock(&mMutex);
4819
4820 // Validate current state
4821 switch (mState) {
4822 case CONFIGURED:
4823 case STARTED:
4824 /* valid state */
4825 break;
4826
4827 case ERROR:
4828 pthread_mutex_unlock(&mMutex);
4829 handleCameraDeviceError();
4830 return -ENODEV;
4831
4832 default:
4833 LOGE("Invalid state %d", mState);
4834 pthread_mutex_unlock(&mMutex);
4835 return -ENODEV;
4836 }
4837
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004838 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004839 if (rc != NO_ERROR) {
4840 LOGE("incoming request is not valid");
4841 pthread_mutex_unlock(&mMutex);
4842 return rc;
4843 }
4844
4845 meta = request->settings;
4846
4847 // For first capture request, send capture intent, and
4848 // stream on all streams
4849 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004850 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004851 // send an unconfigure to the backend so that the isp
4852 // resources are deallocated
4853 if (!mFirstConfiguration) {
4854 cam_stream_size_info_t stream_config_info;
4855 int32_t hal_version = CAM_HAL_V3;
4856 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4857 stream_config_info.buffer_info.min_buffers =
4858 MIN_INFLIGHT_REQUESTS;
4859 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004860 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004861 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004862 clear_metadata_buffer(mParameters);
4863 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4864 CAM_INTF_PARM_HAL_VERSION, hal_version);
4865 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4866 CAM_INTF_META_STREAM_INFO, stream_config_info);
4867 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4868 mParameters);
4869 if (rc < 0) {
4870 LOGE("set_parms for unconfigure failed");
4871 pthread_mutex_unlock(&mMutex);
4872 return rc;
4873 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004874
Thierry Strudel3d639192016-09-09 11:52:26 -07004875 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004876 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004878 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 property_get("persist.camera.is_type", is_type_value, "4");
4881 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4882 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4883 property_get("persist.camera.is_type_preview", is_type_value, "4");
4884 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4885 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004886
4887 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4888 int32_t hal_version = CAM_HAL_V3;
4889 uint8_t captureIntent =
4890 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4891 mCaptureIntent = captureIntent;
4892 clear_metadata_buffer(mParameters);
4893 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4894 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4895 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004896 if (mFirstConfiguration) {
4897 // configure instant AEC
4898 // Instant AEC is a session based parameter and it is needed only
4899 // once per complete session after open camera.
4900 // i.e. This is set only once for the first capture request, after open camera.
4901 setInstantAEC(meta);
4902 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004903 uint8_t fwkVideoStabMode=0;
4904 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4905 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4906 }
4907
Xue Tuecac74e2017-04-17 13:58:15 -07004908 // If EIS setprop is enabled then only turn it on for video/preview
4909 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004910 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 int32_t vsMode;
4912 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4913 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4914 rc = BAD_VALUE;
4915 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004916 LOGD("setEis %d", setEis);
4917 bool eis3Supported = false;
4918 size_t count = IS_TYPE_MAX;
4919 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4920 for (size_t i = 0; i < count; i++) {
4921 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4922 eis3Supported = true;
4923 break;
4924 }
4925 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004926
4927 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004928 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4930 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004931 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4932 is_type = isTypePreview;
4933 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4934 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4935 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004936 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 } else {
4938 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004939 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940 } else {
4941 is_type = IS_TYPE_NONE;
4942 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004943 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004944 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004945 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4946 }
4947 }
4948
4949 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4950 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4951
Thierry Strudel54dc9782017-02-15 12:12:10 -08004952 //Disable tintless only if the property is set to 0
4953 memset(prop, 0, sizeof(prop));
4954 property_get("persist.camera.tintless.enable", prop, "1");
4955 int32_t tintless_value = atoi(prop);
4956
Thierry Strudel3d639192016-09-09 11:52:26 -07004957 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4958 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004959
Thierry Strudel3d639192016-09-09 11:52:26 -07004960 //Disable CDS for HFR mode or if DIS/EIS is on.
4961 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4962 //after every configure_stream
4963 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4964 (m_bIsVideo)) {
4965 int32_t cds = CAM_CDS_MODE_OFF;
4966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4967 CAM_INTF_PARM_CDS_MODE, cds))
4968 LOGE("Failed to disable CDS for HFR mode");
4969
4970 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004971
4972 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4973 uint8_t* use_av_timer = NULL;
4974
4975 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004976 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004977 use_av_timer = &m_debug_avtimer;
4978 }
4979 else{
4980 use_av_timer =
4981 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004982 if (use_av_timer) {
4983 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4984 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004985 }
4986
4987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4988 rc = BAD_VALUE;
4989 }
4990 }
4991
Thierry Strudel3d639192016-09-09 11:52:26 -07004992 setMobicat();
4993
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004994 uint8_t nrMode = 0;
4995 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4996 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4997 }
4998
Thierry Strudel3d639192016-09-09 11:52:26 -07004999 /* Set fps and hfr mode while sending meta stream info so that sensor
5000 * can configure appropriate streaming mode */
5001 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005002 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5003 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5005 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005006 if (rc == NO_ERROR) {
5007 int32_t max_fps =
5008 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005009 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005010 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5011 }
5012 /* For HFR, more buffers are dequeued upfront to improve the performance */
5013 if (mBatchSize) {
5014 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5015 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5016 }
5017 }
5018 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005019 LOGE("setHalFpsRange failed");
5020 }
5021 }
5022 if (meta.exists(ANDROID_CONTROL_MODE)) {
5023 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5024 rc = extractSceneMode(meta, metaMode, mParameters);
5025 if (rc != NO_ERROR) {
5026 LOGE("extractSceneMode failed");
5027 }
5028 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005029 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005030
Thierry Strudel04e026f2016-10-10 11:27:36 -07005031 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5032 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5033 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5034 rc = setVideoHdrMode(mParameters, vhdr);
5035 if (rc != NO_ERROR) {
5036 LOGE("setVideoHDR is failed");
5037 }
5038 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005039
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005040 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005041 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005042 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005043 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5045 sensorModeFullFov)) {
5046 rc = BAD_VALUE;
5047 }
5048 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005049 //TODO: validate the arguments, HSV scenemode should have only the
5050 //advertised fps ranges
5051
5052 /*set the capture intent, hal version, tintless, stream info,
5053         *and DIS enable parameters to the backend*/
5054 LOGD("set_parms META_STREAM_INFO " );
5055 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005056 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5057 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 mStreamConfigInfo.type[i],
5059 mStreamConfigInfo.stream_sizes[i].width,
5060 mStreamConfigInfo.stream_sizes[i].height,
5061 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005062 mStreamConfigInfo.format[i],
5063 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005064 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5067 mParameters);
5068 if (rc < 0) {
5069 LOGE("set_parms failed for hal version, stream info");
5070 }
5071
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005072 cam_sensor_mode_info_t sensorModeInfo = {};
5073 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005074 if (rc != NO_ERROR) {
5075 LOGE("Failed to get sensor output size");
5076 pthread_mutex_unlock(&mMutex);
5077 goto error_exit;
5078 }
5079
5080 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5081 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005082 sensorModeInfo.active_array_size.width,
5083 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005084
5085 /* Set batchmode before initializing channel. Since registerBuffer
5086 * internally initializes some of the channels, better set batchmode
5087 * even before first register buffer */
5088 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5089 it != mStreamInfo.end(); it++) {
5090 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5091 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5092 && mBatchSize) {
5093 rc = channel->setBatchSize(mBatchSize);
5094 //Disable per frame map unmap for HFR/batchmode case
5095 rc |= channel->setPerFrameMapUnmap(false);
5096 if (NO_ERROR != rc) {
5097 LOGE("Channel init failed %d", rc);
5098 pthread_mutex_unlock(&mMutex);
5099 goto error_exit;
5100 }
5101 }
5102 }
5103
5104 //First initialize all streams
5105 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5106 it != mStreamInfo.end(); it++) {
5107 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005108
5109 /* Initial value of NR mode is needed before stream on */
5110 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005111 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5112 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005113 setEis) {
5114 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5115 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5116 is_type = mStreamConfigInfo.is_type[i];
5117 break;
5118 }
5119 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005121 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005122 rc = channel->initialize(IS_TYPE_NONE);
5123 }
5124 if (NO_ERROR != rc) {
5125 LOGE("Channel initialization failed %d", rc);
5126 pthread_mutex_unlock(&mMutex);
5127 goto error_exit;
5128 }
5129 }
5130
5131 if (mRawDumpChannel) {
5132 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5133 if (rc != NO_ERROR) {
5134 LOGE("Error: Raw Dump Channel init failed");
5135 pthread_mutex_unlock(&mMutex);
5136 goto error_exit;
5137 }
5138 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005139 if (mHdrPlusRawSrcChannel) {
5140 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5141 if (rc != NO_ERROR) {
5142 LOGE("Error: HDR+ RAW Source Channel init failed");
5143 pthread_mutex_unlock(&mMutex);
5144 goto error_exit;
5145 }
5146 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005147 if (mSupportChannel) {
5148 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5149 if (rc < 0) {
5150 LOGE("Support channel initialization failed");
5151 pthread_mutex_unlock(&mMutex);
5152 goto error_exit;
5153 }
5154 }
5155 if (mAnalysisChannel) {
5156 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5157 if (rc < 0) {
5158 LOGE("Analysis channel initialization failed");
5159 pthread_mutex_unlock(&mMutex);
5160 goto error_exit;
5161 }
5162 }
5163 if (mDummyBatchChannel) {
5164 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5165 if (rc < 0) {
5166 LOGE("mDummyBatchChannel setBatchSize failed");
5167 pthread_mutex_unlock(&mMutex);
5168 goto error_exit;
5169 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005170 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 if (rc < 0) {
5172 LOGE("mDummyBatchChannel initialization failed");
5173 pthread_mutex_unlock(&mMutex);
5174 goto error_exit;
5175 }
5176 }
5177
5178 // Set bundle info
5179 rc = setBundleInfo();
5180 if (rc < 0) {
5181 LOGE("setBundleInfo failed %d", rc);
5182 pthread_mutex_unlock(&mMutex);
5183 goto error_exit;
5184 }
5185
5186 //update settings from app here
5187 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5188 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5189 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5190 }
5191 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5192 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5193 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5194 }
5195 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5196 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5197 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5198
5199 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5200 (mLinkedCameraId != mCameraId) ) {
5201 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5202 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005203 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 goto error_exit;
5205 }
5206 }
5207
5208 // add bundle related cameras
5209 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5210 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005211 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5212 &m_pDualCamCmdPtr->bundle_info;
5213 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 if (mIsDeviceLinked)
5215 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5216 else
5217 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5218
5219 pthread_mutex_lock(&gCamLock);
5220
5221 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5222 LOGE("Dualcam: Invalid Session Id ");
5223 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005224 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 goto error_exit;
5226 }
5227
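            // Fill in the bundle info according to this camera's role: the main
            // (Bayer) sensor acts as CAM_MODE_PRIMARY and the aux (mono) sensor as
            // CAM_MODE_SECONDARY; both follow the shared 3A sync mode and reference
            // the session id of the linked camera.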
5228 if (mIsMainCamera == 1) {
5229 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5230 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005231 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005232 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005233 // related session id should be session id of linked session
5234 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5235 } else {
5236 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5237 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005238 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005239 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005240 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5241 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005242 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005243 pthread_mutex_unlock(&gCamLock);
5244
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005245 rc = mCameraHandle->ops->set_dual_cam_cmd(
5246 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005247 if (rc < 0) {
5248 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005249 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005250 goto error_exit;
5251 }
5252 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 goto no_error;
5254error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005255 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005256 return rc;
5257no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 mWokenUpByDaemon = false;
5259 mPendingLiveRequest = 0;
5260 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 }
5262
5263 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005264 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005265
5266 if (mFlushPerf) {
5267 //we cannot accept any requests during flush
5268 LOGE("process_capture_request cannot proceed during flush");
5269 pthread_mutex_unlock(&mMutex);
5270 return NO_ERROR; //should return an error
5271 }
5272
5273 if (meta.exists(ANDROID_REQUEST_ID)) {
5274 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5275 mCurrentRequestId = request_id;
5276 LOGD("Received request with id: %d", request_id);
5277 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5278 LOGE("Unable to find request id field, \
5279 & no previous id available");
5280 pthread_mutex_unlock(&mMutex);
5281 return NAME_NOT_FOUND;
5282 } else {
5283 LOGD("Re-using old request id");
5284 request_id = mCurrentRequestId;
5285 }
5286
5287 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5288 request->num_output_buffers,
5289 request->input_buffer,
5290 frameNumber);
5291 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005292 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005294 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 uint32_t snapshotStreamId = 0;
5296 for (size_t i = 0; i < request->num_output_buffers; i++) {
5297 const camera3_stream_buffer_t& output = request->output_buffers[i];
5298 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5299
Emilian Peev7650c122017-01-19 08:24:33 -08005300 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5301 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005302             //FIXME: Call function to store a local copy of the JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005303 blob_request = 1;
5304 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5305 }
5306
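        // Wait on (and close) the acquire fence so the output buffer is safe to
        // write into before it is handed to the backend.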
5307 if (output.acquire_fence != -1) {
5308 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5309 close(output.acquire_fence);
5310 if (rc != OK) {
5311 LOGE("sync wait failed %d", rc);
5312 pthread_mutex_unlock(&mMutex);
5313 return rc;
5314 }
5315 }
5316
Emilian Peev0f3c3162017-03-15 12:57:46 +00005317 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5318 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005319 depthRequestPresent = true;
5320 continue;
5321 }
5322
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005323 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005324 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005325
5326 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5327 isVidBufRequested = true;
5328 }
5329 }
5330
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005331     //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
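    // Streams requested internally by the HAL (e.g. metering-only captures) are
    // appended to the same stream list that is sent to the backend.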
5332 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5333 itr++) {
5334 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5335 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5336 channel->getStreamID(channel->getStreamTypeMask());
5337
5338 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5339 isVidBufRequested = true;
5340 }
5341 }
5342
Thierry Strudel3d639192016-09-09 11:52:26 -07005343 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005344 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005345 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005346 }
5347 if (blob_request && mRawDumpChannel) {
5348 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005349 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005350 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005351 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005352 }
5353
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005354 {
5355 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5356 // Request a RAW buffer if
5357 // 1. mHdrPlusRawSrcChannel is valid.
5358 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5359 // 3. There is no pending HDR+ request.
5360 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5361 mHdrPlusPendingRequests.size() == 0) {
5362 streamsArray.stream_request[streamsArray.num_streams].streamID =
5363 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5364 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5365 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005366 }
5367
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005368 //extract capture intent
5369 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5370 mCaptureIntent =
5371 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5372 }
5373
5374 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5375 mCacMode =
5376 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5377 }
5378
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005379 uint8_t requestedLensShadingMapMode;
5380 // Get the shading map mode.
5381 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5382 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5383 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5384 } else {
5385 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5386 }
5387
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005388 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005389 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005390
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005391 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005392 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005393 // If this request has a still capture intent, try to submit an HDR+ request.
5394 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5395 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5396 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5397 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005398 }
5399
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005400 if (hdrPlusRequest) {
5401 // For a HDR+ request, just set the frame parameters.
5402 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5403 if (rc < 0) {
5404 LOGE("fail to set frame parameters");
5405 pthread_mutex_unlock(&mMutex);
5406 return rc;
5407 }
5408 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005409 /* Parse the settings:
5410 * - For every request in NORMAL MODE
5411 * - For every request in HFR mode during preview only case
5412          *   - For the first request of every batch in HFR mode during video
5413          *     recording. In batch mode the same settings, except the frame
5414          *     number, are repeated in each request of the batch.
5415 */
5416 if (!mBatchSize ||
5417 (mBatchSize && !isVidBufRequested) ||
5418 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005419 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005420 if (rc < 0) {
5421 LOGE("fail to set frame parameters");
5422 pthread_mutex_unlock(&mMutex);
5423 return rc;
5424 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005425
5426 {
5427 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5428 // will be reported in result metadata.
5429 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5430 if (mHdrPlusModeEnabled) {
5431 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5432 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5433 }
5434 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005435 }
5436         /* For batch mode HFR, setFrameParameters is not called for every
5437          * request; only the frame number of the latest request is parsed.
5438          * Keep track of the first and last frame numbers in a batch so that
5439          * metadata for all frame numbers of the batch can be duplicated in
5440          * handleBatchMetadata */
5441 if (mBatchSize) {
5442 if (!mToBeQueuedVidBufs) {
5443 //start of the batch
5444 mFirstFrameNumberInBatch = request->frame_number;
5445 }
5446 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5447 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5448 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005449 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005450 return BAD_VALUE;
5451 }
5452 }
5453 if (mNeedSensorRestart) {
5454 /* Unlock the mutex as restartSensor waits on the channels to be
5455 * stopped, which in turn calls stream callback functions -
5456 * handleBufferWithLock and handleMetadataWithLock */
5457 pthread_mutex_unlock(&mMutex);
5458 rc = dynamicUpdateMetaStreamInfo();
5459 if (rc != NO_ERROR) {
5460 LOGE("Restarting the sensor failed");
5461 return BAD_VALUE;
5462 }
5463 mNeedSensorRestart = false;
5464 pthread_mutex_lock(&mMutex);
5465 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005466 if(mResetInstantAEC) {
5467 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5468 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5469 mResetInstantAEC = false;
5470 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005471 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005472 if (request->input_buffer->acquire_fence != -1) {
5473 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5474 close(request->input_buffer->acquire_fence);
5475 if (rc != OK) {
5476 LOGE("input buffer sync wait failed %d", rc);
5477 pthread_mutex_unlock(&mMutex);
5478 return rc;
5479 }
5480 }
5481 }
5482
5483 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5484 mLastCustIntentFrmNum = frameNumber;
5485 }
5486 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005487 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005488 pendingRequestIterator latestRequest;
5489 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005490 pendingRequest.num_buffers = depthRequestPresent ?
5491 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005492 pendingRequest.request_id = request_id;
5493 pendingRequest.blob_request = blob_request;
5494 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005495 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005496 if (request->input_buffer) {
5497 pendingRequest.input_buffer =
5498 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5499 *(pendingRequest.input_buffer) = *(request->input_buffer);
5500 pInputBuffer = pendingRequest.input_buffer;
5501 } else {
5502 pendingRequest.input_buffer = NULL;
5503 pInputBuffer = NULL;
5504 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005505 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005506
5507 pendingRequest.pipeline_depth = 0;
5508 pendingRequest.partial_result_cnt = 0;
5509 extractJpegMetadata(mCurJpegMeta, request);
5510 pendingRequest.jpegMetadata = mCurJpegMeta;
5511 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005512 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005513 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5514 mHybridAeEnable =
5515 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5516 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005517
5518 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5519 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005520 /* DevCamDebug metadata processCaptureRequest */
5521 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5522 mDevCamDebugMetaEnable =
5523 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5524 }
5525 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5526 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005527
5528 //extract CAC info
5529 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5530 mCacMode =
5531 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5532 }
5533 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005534 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005535 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5536 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005537
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005538 // extract enableZsl info
5539 if (gExposeEnableZslKey) {
5540 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5541 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5542 mZslEnabled = pendingRequest.enableZsl;
5543 } else {
5544 pendingRequest.enableZsl = mZslEnabled;
5545 }
5546 }
5547
Thierry Strudel3d639192016-09-09 11:52:26 -07005548 PendingBuffersInRequest bufsForCurRequest;
5549 bufsForCurRequest.frame_number = frameNumber;
5550 // Mark current timestamp for the new request
5551 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005552 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005553
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005554 if (hdrPlusRequest) {
5555 // Save settings for this request.
5556 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5557 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5558
5559 // Add to pending HDR+ request queue.
5560 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5561 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5562
5563 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5564 }
5565
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005567 if ((request->output_buffers[i].stream->data_space ==
5568 HAL_DATASPACE_DEPTH) &&
5569 (HAL_PIXEL_FORMAT_BLOB ==
5570 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005571 continue;
5572 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005573 RequestedBufferInfo requestedBuf;
5574 memset(&requestedBuf, 0, sizeof(requestedBuf));
5575 requestedBuf.stream = request->output_buffers[i].stream;
5576 requestedBuf.buffer = NULL;
5577 pendingRequest.buffers.push_back(requestedBuf);
5578
5579 // Add to buffer handle the pending buffers list
5580 PendingBufferInfo bufferInfo;
5581 bufferInfo.buffer = request->output_buffers[i].buffer;
5582 bufferInfo.stream = request->output_buffers[i].stream;
5583 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5584 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5585 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5586 frameNumber, bufferInfo.buffer,
5587 channel->getStreamTypeMask(), bufferInfo.stream->format);
5588 }
5589 // Add this request packet into mPendingBuffersMap
5590 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5591 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5592 mPendingBuffersMap.get_num_overall_buffers());
5593
5594 latestRequest = mPendingRequestsList.insert(
5595 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005596
5597 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5598 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005599 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005600 for (size_t i = 0; i < request->num_output_buffers; i++) {
5601 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5602 }
5603
Thierry Strudel3d639192016-09-09 11:52:26 -07005604 if(mFlush) {
5605 LOGI("mFlush is true");
5606 pthread_mutex_unlock(&mMutex);
5607 return NO_ERROR;
5608 }
5609
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005610 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5611 // channel.
5612 if (!hdrPlusRequest) {
5613 int indexUsed;
5614 // Notify metadata channel we receive a request
5615 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005616
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005617 if(request->input_buffer != NULL){
5618 LOGD("Input request, frame_number %d", frameNumber);
5619 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5620 if (NO_ERROR != rc) {
5621 LOGE("fail to set reproc parameters");
5622 pthread_mutex_unlock(&mMutex);
5623 return rc;
5624 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005625 }
5626
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005627 // Call request on other streams
5628 uint32_t streams_need_metadata = 0;
5629 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5630 for (size_t i = 0; i < request->num_output_buffers; i++) {
5631 const camera3_stream_buffer_t& output = request->output_buffers[i];
5632 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5633
5634 if (channel == NULL) {
5635 LOGW("invalid channel pointer for stream");
5636 continue;
5637 }
5638
5639 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5640 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5641 output.buffer, request->input_buffer, frameNumber);
5642 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005643 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005644 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5645 if (rc < 0) {
5646 LOGE("Fail to request on picture channel");
5647 pthread_mutex_unlock(&mMutex);
5648 return rc;
5649 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005650 } else {
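                    // Depth (PD data) blob buffers are not sent through a regular
                    // channel request; the buffer is only mapped into the depth
                    // channel here.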
Emilian Peev7650c122017-01-19 08:24:33 -08005651 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5652 assert(NULL != mDepthChannel);
5653 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005654
Emilian Peev7650c122017-01-19 08:24:33 -08005655 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5656 if (rc < 0) {
5657 LOGE("Fail to map on depth buffer");
5658 pthread_mutex_unlock(&mMutex);
5659 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005660 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005661 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005662 } else {
5663 LOGD("snapshot request with buffer %p, frame_number %d",
5664 output.buffer, frameNumber);
5665 if (!request->settings) {
5666 rc = channel->request(output.buffer, frameNumber,
5667 NULL, mPrevParameters, indexUsed);
5668 } else {
5669 rc = channel->request(output.buffer, frameNumber,
5670 NULL, mParameters, indexUsed);
5671 }
5672 if (rc < 0) {
5673 LOGE("Fail to request on picture channel");
5674 pthread_mutex_unlock(&mMutex);
5675 return rc;
5676 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005677
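                        // Record which buffer index this stream will use so the
                        // backend fills the same buffer; constrained high-speed
                        // sessions let the backend free-run over buffers instead
                        // (CAM_FREERUN_IDX).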
Emilian Peev7650c122017-01-19 08:24:33 -08005678 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5679 uint32_t j = 0;
5680 for (j = 0; j < streamsArray.num_streams; j++) {
5681 if (streamsArray.stream_request[j].streamID == streamId) {
5682 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5683 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5684 else
5685 streamsArray.stream_request[j].buf_index = indexUsed;
5686 break;
5687 }
5688 }
5689 if (j == streamsArray.num_streams) {
5690 LOGE("Did not find matching stream to update index");
5691 assert(0);
5692 }
5693
5694 pendingBufferIter->need_metadata = true;
5695 streams_need_metadata++;
5696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005697 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005698 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5699 bool needMetadata = false;
5700 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5701 rc = yuvChannel->request(output.buffer, frameNumber,
5702 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5703 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005704 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005705 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005706 pthread_mutex_unlock(&mMutex);
5707 return rc;
5708 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005709
5710 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5711 uint32_t j = 0;
5712 for (j = 0; j < streamsArray.num_streams; j++) {
5713 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005714 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5715 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5716 else
5717 streamsArray.stream_request[j].buf_index = indexUsed;
5718 break;
5719 }
5720 }
5721 if (j == streamsArray.num_streams) {
5722 LOGE("Did not find matching stream to update index");
5723 assert(0);
5724 }
5725
5726 pendingBufferIter->need_metadata = needMetadata;
5727 if (needMetadata)
5728 streams_need_metadata += 1;
5729 LOGD("calling YUV channel request, need_metadata is %d",
5730 needMetadata);
5731 } else {
5732 LOGD("request with buffer %p, frame_number %d",
5733 output.buffer, frameNumber);
5734
5735 rc = channel->request(output.buffer, frameNumber, indexUsed);
5736
5737 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5738 uint32_t j = 0;
5739 for (j = 0; j < streamsArray.num_streams; j++) {
5740 if (streamsArray.stream_request[j].streamID == streamId) {
5741 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5742 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5743 else
5744 streamsArray.stream_request[j].buf_index = indexUsed;
5745 break;
5746 }
5747 }
5748 if (j == streamsArray.num_streams) {
5749 LOGE("Did not find matching stream to update index");
5750 assert(0);
5751 }
5752
5753 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5754 && mBatchSize) {
5755 mToBeQueuedVidBufs++;
5756 if (mToBeQueuedVidBufs == mBatchSize) {
5757 channel->queueBatchBuf();
5758 }
5759 }
5760 if (rc < 0) {
5761 LOGE("request failed");
5762 pthread_mutex_unlock(&mMutex);
5763 return rc;
5764 }
5765 }
5766 pendingBufferIter++;
5767 }
5768
5769 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5770 itr++) {
5771 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5772
5773 if (channel == NULL) {
5774 LOGE("invalid channel pointer for stream");
5775 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005776 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005777 return BAD_VALUE;
5778 }
5779
5780 InternalRequest requestedStream;
5781 requestedStream = (*itr);
5782
5783
5784 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5785 LOGD("snapshot request internally input buffer %p, frame_number %d",
5786 request->input_buffer, frameNumber);
5787 if(request->input_buffer != NULL){
5788 rc = channel->request(NULL, frameNumber,
5789 pInputBuffer, &mReprocMeta, indexUsed, true,
5790 requestedStream.meteringOnly);
5791 if (rc < 0) {
5792 LOGE("Fail to request on picture channel");
5793 pthread_mutex_unlock(&mMutex);
5794 return rc;
5795 }
5796 } else {
5797 LOGD("snapshot request with frame_number %d", frameNumber);
5798 if (!request->settings) {
5799 rc = channel->request(NULL, frameNumber,
5800 NULL, mPrevParameters, indexUsed, true,
5801 requestedStream.meteringOnly);
5802 } else {
5803 rc = channel->request(NULL, frameNumber,
5804 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5805 }
5806 if (rc < 0) {
5807 LOGE("Fail to request on picture channel");
5808 pthread_mutex_unlock(&mMutex);
5809 return rc;
5810 }
5811
5812 if ((*itr).meteringOnly != 1) {
5813 requestedStream.need_metadata = 1;
5814 streams_need_metadata++;
5815 }
5816 }
5817
5818 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5819 uint32_t j = 0;
5820 for (j = 0; j < streamsArray.num_streams; j++) {
5821 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005822 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5823 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5824 else
5825 streamsArray.stream_request[j].buf_index = indexUsed;
5826 break;
5827 }
5828 }
5829 if (j == streamsArray.num_streams) {
5830 LOGE("Did not find matching stream to update index");
5831 assert(0);
5832 }
5833
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005834 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005835 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005836 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005837 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005838 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005839 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005840 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005841 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005842
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005843 //If 2 streams have need_metadata set to true, fail the request, unless
5844 //we copy/reference count the metadata buffer
5845 if (streams_need_metadata > 1) {
5846 LOGE("not supporting request in which two streams requires"
5847 " 2 HAL metadata for reprocessing");
5848 pthread_mutex_unlock(&mMutex);
5849 return -EINVAL;
5850 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005851
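    // Decide whether the sensor should produce PD (phase-detection) data for this
    // request: disabled when there is no depth channel, skipped by default when a
    // depth channel exists, enabled only when the request explicitly asks for it,
    // otherwise reusing the last requested mode cached in mDepthCloudMode.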
Emilian Peev656e4fa2017-06-02 16:47:04 +01005852 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5853 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5854 if (depthRequestPresent && mDepthChannel) {
5855 if (request->settings) {
5856 camera_metadata_ro_entry entry;
5857 if (find_camera_metadata_ro_entry(request->settings,
5858 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5859 if (entry.data.u8[0]) {
5860 pdafEnable = CAM_PD_DATA_ENABLED;
5861 } else {
5862 pdafEnable = CAM_PD_DATA_SKIP;
5863 }
5864 mDepthCloudMode = pdafEnable;
5865 } else {
5866 pdafEnable = mDepthCloudMode;
5867 }
5868 } else {
5869 pdafEnable = mDepthCloudMode;
5870 }
5871 }
5872
Emilian Peev7650c122017-01-19 08:24:33 -08005873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5874 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5875 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5876 pthread_mutex_unlock(&mMutex);
5877 return BAD_VALUE;
5878 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005879
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005880 if (request->input_buffer == NULL) {
5881 /* Set the parameters to backend:
5882 * - For every request in NORMAL MODE
5883 * - For every request in HFR mode during preview only case
5884 * - Once every batch in HFR mode during video recording
5885 */
5886 if (!mBatchSize ||
5887 (mBatchSize && !isVidBufRequested) ||
5888 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5889 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5890 mBatchSize, isVidBufRequested,
5891 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005892
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005893 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
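                // The batch is complete: merge this request's streams into
                // mBatchedStreamsArray (skipping IDs already present) so each
                // stream appears only once in the set_parms call below.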
5894 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5895 uint32_t m = 0;
5896 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5897 if (streamsArray.stream_request[k].streamID ==
5898 mBatchedStreamsArray.stream_request[m].streamID)
5899 break;
5900 }
5901 if (m == mBatchedStreamsArray.num_streams) {
5902 mBatchedStreamsArray.stream_request\
5903 [mBatchedStreamsArray.num_streams].streamID =
5904 streamsArray.stream_request[k].streamID;
5905 mBatchedStreamsArray.stream_request\
5906 [mBatchedStreamsArray.num_streams].buf_index =
5907 streamsArray.stream_request[k].buf_index;
5908 mBatchedStreamsArray.num_streams =
5909 mBatchedStreamsArray.num_streams + 1;
5910 }
5911 }
5912 streamsArray = mBatchedStreamsArray;
5913 }
5914 /* Update stream id of all the requested buffers */
5915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5916 streamsArray)) {
5917 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005918 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005919 return BAD_VALUE;
5920 }
5921
5922 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5923 mParameters);
5924 if (rc < 0) {
5925 LOGE("set_parms failed");
5926 }
5927             /* reset to zero because the batch is queued */
5928 mToBeQueuedVidBufs = 0;
5929 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5930 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5931 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
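            // Batch not yet full: accumulate this request's streams into
            // mBatchedStreamsArray so they are submitted together once the batch
            // is queued.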
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005932 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5933 uint32_t m = 0;
5934 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5935 if (streamsArray.stream_request[k].streamID ==
5936 mBatchedStreamsArray.stream_request[m].streamID)
5937 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005938 }
5939 if (m == mBatchedStreamsArray.num_streams) {
5940 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5941 streamID = streamsArray.stream_request[k].streamID;
5942 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5943 buf_index = streamsArray.stream_request[k].buf_index;
5944 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5945 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005946 }
5947 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005948 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005949
5950 // Start all streams after the first setting is sent, so that the
5951 // setting can be applied sooner: (0 + apply_delay)th frame.
5952 if (mState == CONFIGURED && mChannelHandle) {
5953 //Then start them.
5954 LOGH("Start META Channel");
5955 rc = mMetadataChannel->start();
5956 if (rc < 0) {
5957 LOGE("META channel start failed");
5958 pthread_mutex_unlock(&mMutex);
5959 return rc;
5960 }
5961
5962 if (mAnalysisChannel) {
5963 rc = mAnalysisChannel->start();
5964 if (rc < 0) {
5965 LOGE("Analysis channel start failed");
5966 mMetadataChannel->stop();
5967 pthread_mutex_unlock(&mMutex);
5968 return rc;
5969 }
5970 }
5971
5972 if (mSupportChannel) {
5973 rc = mSupportChannel->start();
5974 if (rc < 0) {
5975 LOGE("Support channel start failed");
5976 mMetadataChannel->stop();
5977                     /* Although support and analysis are mutually exclusive today,
5978                        stop the analysis channel anyway for future proofing */
5979 if (mAnalysisChannel) {
5980 mAnalysisChannel->stop();
5981 }
5982 pthread_mutex_unlock(&mMutex);
5983 return rc;
5984 }
5985 }
5986 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5987 it != mStreamInfo.end(); it++) {
5988 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5989 LOGH("Start Processing Channel mask=%d",
5990 channel->getStreamTypeMask());
5991 rc = channel->start();
5992 if (rc < 0) {
5993 LOGE("channel start failed");
5994 pthread_mutex_unlock(&mMutex);
5995 return rc;
5996 }
5997 }
5998
5999 if (mRawDumpChannel) {
6000 LOGD("Starting raw dump stream");
6001 rc = mRawDumpChannel->start();
6002 if (rc != NO_ERROR) {
6003 LOGE("Error Starting Raw Dump Channel");
6004 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6005 it != mStreamInfo.end(); it++) {
6006 QCamera3Channel *channel =
6007 (QCamera3Channel *)(*it)->stream->priv;
6008 LOGH("Stopping Processing Channel mask=%d",
6009 channel->getStreamTypeMask());
6010 channel->stop();
6011 }
6012 if (mSupportChannel)
6013 mSupportChannel->stop();
6014 if (mAnalysisChannel) {
6015 mAnalysisChannel->stop();
6016 }
6017 mMetadataChannel->stop();
6018 pthread_mutex_unlock(&mMutex);
6019 return rc;
6020 }
6021 }
6022
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006023 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006024 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006025 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006026 if (rc != NO_ERROR) {
6027 LOGE("start_channel failed %d", rc);
6028 pthread_mutex_unlock(&mMutex);
6029 return rc;
6030 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006031
6032 {
6033 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006034 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006035
6036 // Now that sensor mode should have been selected, get the selected sensor mode
6037 // info.
6038 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6039 getCurrentSensorModeInfo(mSensorModeInfo);
6040
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006041 if (EaselManagerClientOpened) {
6042 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006043 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6044 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006045 if (rc != OK) {
6046 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6047 mCameraId, mSensorModeInfo.op_pixel_clk);
6048 pthread_mutex_unlock(&mMutex);
6049 return rc;
6050 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006051 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006052 }
6053 }
6054
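                // Sensor streaming is deferred until here so that the channels above
                // and, when present, the Easel MIPI interface are configured before
                // the sensor starts producing frames.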
6055 // Start sensor streaming.
6056 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6057 mChannelHandle);
6058 if (rc != NO_ERROR) {
6059 LOGE("start_sensor_stream_on failed %d", rc);
6060 pthread_mutex_unlock(&mMutex);
6061 return rc;
6062 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006063 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006064 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006065 }
6066
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006067 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006068 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006069 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006070 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006071 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6072 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6073 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6074 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006075
6076 if (isSessionHdrPlusModeCompatible()) {
6077 rc = enableHdrPlusModeLocked();
6078 if (rc != OK) {
6079 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6080 pthread_mutex_unlock(&mMutex);
6081 return rc;
6082 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006083 }
6084
6085 mFirstPreviewIntentSeen = true;
6086 }
6087 }
6088
Thierry Strudel3d639192016-09-09 11:52:26 -07006089 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6090
6091 mState = STARTED;
6092 // Added a timed condition wait
6093 struct timespec ts;
6094 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006095 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006096 if (rc < 0) {
6097 isValidTimeout = 0;
6098 LOGE("Error reading the real time clock!!");
6099 }
6100 else {
6101         // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006102 int64_t timeout = 5;
6103 {
6104 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6105 // If there is a pending HDR+ request, the following requests may be blocked until the
6106 // HDR+ request is done. So allow a longer timeout.
6107 if (mHdrPlusPendingRequests.size() > 0) {
6108 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6109 }
6110 }
6111 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006112 }
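    // Throttle the caller: block until the number of in-flight requests drops
    // below mMinInFlightRequests (reprocess requests with an input buffer are not
    // throttled). A timeout here is treated as a fatal error and returns -ENODEV.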
6113 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006114 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006115 (mState != ERROR) && (mState != DEINIT)) {
6116 if (!isValidTimeout) {
6117 LOGD("Blocking on conditional wait");
6118 pthread_cond_wait(&mRequestCond, &mMutex);
6119 }
6120 else {
6121 LOGD("Blocking on timed conditional wait");
6122 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6123 if (rc == ETIMEDOUT) {
6124 rc = -ENODEV;
6125 LOGE("Unblocked on timeout!!!!");
6126 break;
6127 }
6128 }
6129 LOGD("Unblocked");
6130 if (mWokenUpByDaemon) {
6131 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006132 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006133 break;
6134 }
6135 }
6136 pthread_mutex_unlock(&mMutex);
6137
6138 return rc;
6139}
6140
6141/*===========================================================================
6142 * FUNCTION : dump
6143 *
6144 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and the
6145 *              pending frame drop list) to the given file descriptor.
6146 * PARAMETERS :
6147 *   @fd : file descriptor to write the dump to
6148 *
6149 * RETURN     : None
6150 *==========================================================================*/
6151void QCamera3HardwareInterface::dump(int fd)
6152{
6153 pthread_mutex_lock(&mMutex);
6154 dprintf(fd, "\n Camera HAL3 information Begin \n");
6155
6156 dprintf(fd, "\nNumber of pending requests: %zu \n",
6157 mPendingRequestsList.size());
6158 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6159 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6160 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6161 for(pendingRequestIterator i = mPendingRequestsList.begin();
6162 i != mPendingRequestsList.end(); i++) {
6163 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6164 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6165 i->input_buffer);
6166 }
6167 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6168 mPendingBuffersMap.get_num_overall_buffers());
6169 dprintf(fd, "-------+------------------\n");
6170 dprintf(fd, " Frame | Stream type mask \n");
6171 dprintf(fd, "-------+------------------\n");
6172 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6173 for(auto &j : req.mPendingBufferList) {
6174 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6175 dprintf(fd, " %5d | %11d \n",
6176 req.frame_number, channel->getStreamTypeMask());
6177 }
6178 }
6179 dprintf(fd, "-------+------------------\n");
6180
6181 dprintf(fd, "\nPending frame drop list: %zu\n",
6182 mPendingFrameDropList.size());
6183 dprintf(fd, "-------+-----------\n");
6184 dprintf(fd, " Frame | Stream ID \n");
6185 dprintf(fd, "-------+-----------\n");
6186 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6187 i != mPendingFrameDropList.end(); i++) {
6188 dprintf(fd, " %5d | %9d \n",
6189 i->frame_number, i->stream_ID);
6190 }
6191 dprintf(fd, "-------+-----------\n");
6192
6193 dprintf(fd, "\n Camera HAL3 information End \n");
6194
6195 /* use dumpsys media.camera as trigger to send update debug level event */
6196 mUpdateDebugLevel = true;
6197 pthread_mutex_unlock(&mMutex);
6198 return;
6199}
6200
6201/*===========================================================================
6202 * FUNCTION : flush
6203 *
6204 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6205 * conditionally restarts channels
6206 *
6207 * PARAMETERS :
6208 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006209 *   @ stopChannelImmediately: stop the channel immediately. This should be used
6210 *                             when the device has encountered an error and MIPI
6211 *                             may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006212 *
6213 * RETURN :
6214 * 0 on success
6215 * Error code on failure
6216 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006217int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006218{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006219 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006220 int32_t rc = NO_ERROR;
6221
6222 LOGD("Unblocking Process Capture Request");
6223 pthread_mutex_lock(&mMutex);
6224 mFlush = true;
6225 pthread_mutex_unlock(&mMutex);
6226
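    // Stop all channels first so no new buffers are produced while the pending
    // requests are failed and their buffers are returned below.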
6227 rc = stopAllChannels();
6228 // unlink of dualcam
6229 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006230 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6231 &m_pDualCamCmdPtr->bundle_info;
6232 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006233 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6234 pthread_mutex_lock(&gCamLock);
6235
6236 if (mIsMainCamera == 1) {
6237 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6238 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006239 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006240 // related session id should be session id of linked session
6241 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6242 } else {
6243 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6244 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006245 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006246 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6247 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006248 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006249 pthread_mutex_unlock(&gCamLock);
6250
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006251 rc = mCameraHandle->ops->set_dual_cam_cmd(
6252 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006253 if (rc < 0) {
6254 LOGE("Dualcam: Unlink failed, but still proceed to close");
6255 }
6256 }
6257
6258 if (rc < 0) {
6259 LOGE("stopAllChannels failed");
6260 return rc;
6261 }
6262 if (mChannelHandle) {
6263 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006264 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006265 }
6266
6267 // Reset bundle info
6268 rc = setBundleInfo();
6269 if (rc < 0) {
6270 LOGE("setBundleInfo failed %d", rc);
6271 return rc;
6272 }
6273
6274 // Mutex Lock
6275 pthread_mutex_lock(&mMutex);
6276
6277 // Unblock process_capture_request
6278 mPendingLiveRequest = 0;
6279 pthread_cond_signal(&mRequestCond);
6280
6281 rc = notifyErrorForPendingRequests();
6282 if (rc < 0) {
6283 LOGE("notifyErrorForPendingRequests failed");
6284 pthread_mutex_unlock(&mMutex);
6285 return rc;
6286 }
6287
6288 mFlush = false;
6289
6290 // Start the Streams/Channels
6291 if (restartChannels) {
6292 rc = startAllChannels();
6293 if (rc < 0) {
6294 LOGE("startAllChannels failed");
6295 pthread_mutex_unlock(&mMutex);
6296 return rc;
6297 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006298 if (mChannelHandle) {
6299 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006300 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006301 if (rc < 0) {
6302 LOGE("start_channel failed");
6303 pthread_mutex_unlock(&mMutex);
6304 return rc;
6305 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006306 }
6307 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006308 pthread_mutex_unlock(&mMutex);
6309
6310 return 0;
6311}
6312
6313/*===========================================================================
6314 * FUNCTION : flushPerf
6315 *
6316 * DESCRIPTION: This is the performance optimization version of flush that does
6317 *              not use stream off; rather, it flushes the system
6318 *
6319 * PARAMETERS :
6320 *
6321 *
6322 * RETURN : 0 : success
6323 * -EINVAL: input is malformed (device is not valid)
6324 * -ENODEV: if the device has encountered a serious error
6325 *==========================================================================*/
6326int QCamera3HardwareInterface::flushPerf()
6327{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006328 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006329 int32_t rc = 0;
6330 struct timespec timeout;
6331 bool timed_wait = false;
6332
6333 pthread_mutex_lock(&mMutex);
6334 mFlushPerf = true;
6335 mPendingBuffersMap.numPendingBufsAtFlush =
6336 mPendingBuffersMap.get_num_overall_buffers();
6337 LOGD("Calling flush. Wait for %d buffers to return",
6338 mPendingBuffersMap.numPendingBufsAtFlush);
6339
6340 /* send the flush event to the backend */
6341 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6342 if (rc < 0) {
6343 LOGE("Error in flush: IOCTL failure");
6344 mFlushPerf = false;
6345 pthread_mutex_unlock(&mMutex);
6346 return -ENODEV;
6347 }
6348
6349 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6350 LOGD("No pending buffers in HAL, return flush");
6351 mFlushPerf = false;
6352 pthread_mutex_unlock(&mMutex);
6353 return rc;
6354 }
6355
6356 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006357 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006358 if (rc < 0) {
6359 LOGE("Error reading the real time clock, cannot use timed wait");
6360 } else {
6361 timeout.tv_sec += FLUSH_TIMEOUT;
6362 timed_wait = true;
6363 }
6364
6365 //Block on conditional variable
6366 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6367 LOGD("Waiting on mBuffersCond");
6368 if (!timed_wait) {
6369 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6370 if (rc != 0) {
6371 LOGE("pthread_cond_wait failed due to rc = %s",
6372 strerror(rc));
6373 break;
6374 }
6375 } else {
6376 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6377 if (rc != 0) {
6378 LOGE("pthread_cond_timedwait failed due to rc = %s",
6379 strerror(rc));
6380 break;
6381 }
6382 }
6383 }
6384 if (rc != 0) {
6385 mFlushPerf = false;
6386 pthread_mutex_unlock(&mMutex);
6387 return -ENODEV;
6388 }
6389
6390 LOGD("Received buffers, now safe to return them");
6391
6392 //make sure the channels handle flush
6393 //currently only required for the picture channel to release snapshot resources
6394 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6395 it != mStreamInfo.end(); it++) {
6396 QCamera3Channel *channel = (*it)->channel;
6397 if (channel) {
6398 rc = channel->flush();
6399 if (rc) {
6400 LOGE("Flushing the channels failed with error %d", rc);
6401 // even though the channel flush failed we need to continue and
6402 // return the buffers we have to the framework, however the return
6403 // value will be an error
6404 rc = -ENODEV;
6405 }
6406 }
6407 }
6408
6409 /* notify the frameworks and send errored results */
6410 rc = notifyErrorForPendingRequests();
6411 if (rc < 0) {
6412 LOGE("notifyErrorForPendingRequests failed");
6413 pthread_mutex_unlock(&mMutex);
6414 return rc;
6415 }
6416
6417 //unblock process_capture_request
6418 mPendingLiveRequest = 0;
6419 unblockRequestIfNecessary();
6420
6421 mFlushPerf = false;
6422 pthread_mutex_unlock(&mMutex);
6423 LOGD ("Flush Operation complete. rc = %d", rc);
6424 return rc;
6425}
6426
6427/*===========================================================================
6428 * FUNCTION : handleCameraDeviceError
6429 *
6430 * DESCRIPTION: This function calls internal flush and notifies the error to
6431 * framework and updates the state variable.
6432 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006433 * PARAMETERS :
6434 * @stopChannelImmediately : stop channels immediately without waiting for
6435 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006436 *
6437 * RETURN : NO_ERROR on Success
6438 * Error code on failure
6439 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006440int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006441{
6442 int32_t rc = NO_ERROR;
6443
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006444 {
6445 Mutex::Autolock lock(mFlushLock);
6446 pthread_mutex_lock(&mMutex);
6447 if (mState != ERROR) {
6448 //if mState != ERROR, nothing to be done
6449 pthread_mutex_unlock(&mMutex);
6450 return NO_ERROR;
6451 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006452 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006453
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006454 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006455 if (NO_ERROR != rc) {
6456 LOGE("internal flush to handle mState = ERROR failed");
6457 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006458
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006459 pthread_mutex_lock(&mMutex);
6460 mState = DEINIT;
6461 pthread_mutex_unlock(&mMutex);
6462 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006463
6464 camera3_notify_msg_t notify_msg;
6465 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6466 notify_msg.type = CAMERA3_MSG_ERROR;
6467 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6468 notify_msg.message.error.error_stream = NULL;
6469 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006470 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006471
6472 return rc;
6473}
6474
6475/*===========================================================================
6476 * FUNCTION : captureResultCb
6477 *
6478 * DESCRIPTION: Callback handler for all capture results
6479 * (streams, as well as metadata)
6480 *
6481 * PARAMETERS :
6482 * @metadata : metadata information
6483 * @buffer : actual gralloc buffer to be returned to frameworks.
6484 * NULL if metadata.
6485 *
6486 * RETURN : NONE
6487 *==========================================================================*/
6488void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6489 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6490{
6491 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006492 pthread_mutex_lock(&mMutex);
6493 uint8_t batchSize = mBatchSize;
6494 pthread_mutex_unlock(&mMutex);
6495 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006496 handleBatchMetadata(metadata_buf,
6497 true /* free_and_bufdone_meta_buf */);
6498 } else { /* mBatchSize = 0 */
6499 hdrPlusPerfLock(metadata_buf);
6500 pthread_mutex_lock(&mMutex);
6501 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006502 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006503 true /* last urgent frame of batch metadata */,
6504 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006505 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006506 pthread_mutex_unlock(&mMutex);
6507 }
6508 } else if (isInputBuffer) {
6509 pthread_mutex_lock(&mMutex);
6510 handleInputBufferWithLock(frame_number);
6511 pthread_mutex_unlock(&mMutex);
6512 } else {
6513 pthread_mutex_lock(&mMutex);
6514 handleBufferWithLock(buffer, frame_number);
6515 pthread_mutex_unlock(&mMutex);
6516 }
6517 return;
6518}
6519
6520/*===========================================================================
6521 * FUNCTION : getReprocessibleOutputStreamId
6522 *
6523 * DESCRIPTION: Get source output stream id for the input reprocess stream
6524 * based on size and format, which would be the largest
6525 * output stream if an input stream exists.
6526 *
6527 * PARAMETERS :
6528 * @id : return the stream id if found
6529 *
6530 * RETURN : int32_t type of status
6531 * NO_ERROR -- success
6532 * non-zero failure code
6533 *==========================================================================*/
6534int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6535{
6536 /* check if there is any output or bidirectional stream with the same size and format,
6537 and return that stream */
6538 if ((mInputStreamInfo.dim.width > 0) &&
6539 (mInputStreamInfo.dim.height > 0)) {
6540 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6541 it != mStreamInfo.end(); it++) {
6542
6543 camera3_stream_t *stream = (*it)->stream;
6544 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6545 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6546 (stream->format == mInputStreamInfo.format)) {
6547 // Usage flag for an input stream and the source output stream
6548 // may be different.
6549 LOGD("Found reprocessible output stream! %p", *it);
6550 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6551 stream->usage, mInputStreamInfo.usage);
6552
6553 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6554 if (channel != NULL && channel->mStreams[0]) {
6555 id = channel->mStreams[0]->getMyServerID();
6556 return NO_ERROR;
6557 }
6558 }
6559 }
6560 } else {
6561 LOGD("No input stream, so no reprocessible output stream");
6562 }
6563 return NAME_NOT_FOUND;
6564}
6565
6566/*===========================================================================
6567 * FUNCTION : lookupFwkName
6568 *
6569 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6570 * make sure the parameter is correctly propagated
6571 *
6572 * PARAMETERS :
6573 * @arr : map between the two enums
6574 * @len : len of the map
6575 * @hal_name : name of the hal_parm to map
6576 *
6577 * RETURN : int type of status
6578 * fwk_name -- success
6579 * non-zero failure code
6580 *==========================================================================*/
6581template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6582 size_t len, halType hal_name)
6583{
6584
6585 for (size_t i = 0; i < len; i++) {
6586 if (arr[i].hal_name == hal_name) {
6587 return arr[i].fwk_name;
6588 }
6589 }
6590
6591 /* Not being able to find a matching framework type is not necessarily
6592 * an error case. This happens when mm-camera supports more attributes
6593 * than the framework does */
6594 LOGH("Cannot find matching framework type");
6595 return NAME_NOT_FOUND;
6596}
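// Typical usage, as in translateFromHalMetadata() below: map a backend enum to its
// framework counterpart and skip publishing the tag when no mapping exists, e.g.
//     int val = lookupFwkName(SCENE_MODES_MAP, METADATA_MAP_SIZE(SCENE_MODES_MAP), *sceneMode);
//     if (NAME_NOT_FOUND != val) { /* publish (uint8_t)val to the framework */ }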
6597
6598/*===========================================================================
6599 * FUNCTION : lookupHalName
6600 *
6601 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6602 * make sure the parameter is correctly propagated
6603 *
6604 * PARAMETERS :
6605 * @arr : map between the two enums
6606 * @len : len of the map
6607 * @fwk_name : framework enum value to map
6608 *
6609 * RETURN : int32_t type of status
6610 * hal_name -- success
6611 * non-zero failure code
6612 *==========================================================================*/
6613template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6614 size_t len, fwkType fwk_name)
6615{
6616 for (size_t i = 0; i < len; i++) {
6617 if (arr[i].fwk_name == fwk_name) {
6618 return arr[i].hal_name;
6619 }
6620 }
6621
6622 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6623 return NAME_NOT_FOUND;
6624}
6625
6626/*===========================================================================
6627 * FUNCTION : lookupProp
6628 *
6629 * DESCRIPTION: lookup a value by its name
6630 *
6631 * PARAMETERS :
6632 * @arr : map between the two enums
6633 * @len : size of the map
6634 * @name : name to be looked up
6635 *
6636 * RETURN : Value if found
6637 * CAM_CDS_MODE_MAX if not found
6638 *==========================================================================*/
6639template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6640 size_t len, const char *name)
6641{
6642 if (name) {
6643 for (size_t i = 0; i < len; i++) {
6644 if (!strcmp(arr[i].desc, name)) {
6645 return arr[i].val;
6646 }
6647 }
6648 }
6649 return CAM_CDS_MODE_MAX;
6650}
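// Illustrative usage (hypothetical map/property names; actual callers define their own):
//     cam_cds_mode_type_t mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop_value);
// which yields CAM_CDS_MODE_MAX when the string is not present in the map.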
6651
6652/*===========================================================================
6653 * FUNCTION   : translateFromHalMetadata
6654 * DESCRIPTION: Translate metadata from the HAL/backend format into the
6655 *              camera_metadata_t format expected by the framework
6656 * PARAMETERS :
6657 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006658 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006659 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006660 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6661 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006662 *
6663 * RETURN : camera_metadata_t*
6664 * metadata in a format specified by fwk
6665 *==========================================================================*/
6666camera_metadata_t*
6667QCamera3HardwareInterface::translateFromHalMetadata(
6668 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006669 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006670 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006671 bool lastMetadataInBatch,
6672 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006673{
6674 CameraMetadata camMetadata;
6675 camera_metadata_t *resultMetadata;
6676
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006677 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006678 /* In batch mode, if this is not the last metadata in the batch, populate only
6679 * SENSOR_TIMESTAMP; the timestamp is needed for the shutter notification calculation.
6680 */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006681 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006682 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006683 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006684 }
6685
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006686 if (pendingRequest.jpegMetadata.entryCount())
6687 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006688
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006689 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6690 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6691 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6692 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6693 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006694 if (mBatchSize == 0) {
6695 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006696 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006697 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006698
Samuel Ha68ba5172016-12-15 18:41:12 -08006699 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6700 // Only update DevCamDebug metadata conditionally: non-HFR mode and only when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006701 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006702 // DevCamDebug metadata translateFromHalMetadata AF
6703 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6704 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6705 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6706 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6707 }
6708 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6709 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6710 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6711 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6714 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6715 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6716 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6717 }
6718 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6719 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6720 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6721 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6722 }
6723 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6724 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6725 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6726 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6727 }
6728 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6729 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6730 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6731 *DevCamDebug_af_monitor_pdaf_target_pos;
6732 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6733 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6734 }
6735 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6736 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6737 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6738 *DevCamDebug_af_monitor_pdaf_confidence;
6739 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6740 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6741 }
6742 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6743 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6744 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6745 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6746 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6749 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6750 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6751 *DevCamDebug_af_monitor_tof_target_pos;
6752 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6753 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6754 }
6755 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6756 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6757 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6758 *DevCamDebug_af_monitor_tof_confidence;
6759 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6760 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6763 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6764 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6765 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6766 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6767 }
6768 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6769 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6770 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6771 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6772 &fwk_DevCamDebug_af_monitor_type_select, 1);
6773 }
6774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6775 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6776 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6777 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6778 &fwk_DevCamDebug_af_monitor_refocus, 1);
6779 }
6780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6781 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6782 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6783 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6784 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6785 }
6786 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6787 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6788 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6789 *DevCamDebug_af_search_pdaf_target_pos;
6790 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6791 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6792 }
6793 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6794 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6795 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6796 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6797 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6798 }
6799 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6800 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6801 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6802 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6803 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6804 }
6805 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6806 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6807 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6808 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6809 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6810 }
6811 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6812 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6813 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6814 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6815 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6816 }
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6818 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6819 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6820 *DevCamDebug_af_search_tof_target_pos;
6821 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6822 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6823 }
6824 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6825 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6826 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6827 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6828 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6831 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6832 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6833 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6834 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6835 }
6836 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6837 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6838 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6839 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6840 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6841 }
6842 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6843 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6844 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6845 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6846 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6847 }
6848 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6849 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6850 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6851 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6852 &fwk_DevCamDebug_af_search_type_select, 1);
6853 }
6854 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6855 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6856 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6857 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6858 &fwk_DevCamDebug_af_search_next_pos, 1);
6859 }
6860 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6861 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6862 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6863 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6864 &fwk_DevCamDebug_af_search_target_pos, 1);
6865 }
6866 // DevCamDebug metadata translateFromHalMetadata AEC
6867 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6868 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6869 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6870 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6871 }
6872 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6873 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6874 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6875 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6876 }
6877 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6878 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6879 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6880 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6881 }
6882 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6883 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6884 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6885 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6886 }
6887 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6888 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6889 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6890 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6891 }
6892 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6893 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6894 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6895 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6896 }
6897 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6898 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6899 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6900 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6901 }
6902 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6903 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6904 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6905 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6906 }
Samuel Ha34229982017-02-17 13:51:11 -08006907 // DevCamDebug metadata translateFromHalMetadata zzHDR
6908 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6909 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6910 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6911 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6912 }
6913 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6914 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006915 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006916 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6917 }
6918 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6919 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6920 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6921 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6922 }
6923 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6924 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006925 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006926 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6927 }
6928 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6929 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6930 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6931 *DevCamDebug_aec_hdr_sensitivity_ratio;
6932 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6933 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6934 }
6935 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6936 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6937 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6938 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6939 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6940 }
6941 // DevCamDebug metadata translateFromHalMetadata ADRC
6942 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6943 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6944 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6945 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6946 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6947 }
6948 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6949 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6950 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6951 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6952 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6953 }
6954 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6955 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6956 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6957 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6958 }
6959 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6960 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6961 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6962 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6963 }
6964 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6965 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6966 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6967 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6968 }
6969 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6970 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6971 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6972 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6973 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006974 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6975 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6976 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6977 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6978 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6979 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6980 }
6981 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6982 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6983 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6984 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6985 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6986 }
6987 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6988 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6989 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6990 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6991 &fwk_DevCamDebug_aec_subject_motion, 1);
6992 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006993 // DevCamDebug metadata translateFromHalMetadata AWB
6994 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6995 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6996 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6997 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6998 }
6999 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7000 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7001 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7002 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7003 }
7004 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7005 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7006 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7007 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7008 }
7009 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7010 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7011 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7012 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7013 }
7014 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7015 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7016 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7017 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7018 }
7019 }
7020 // atrace_end(ATRACE_TAG_ALWAYS);
7021
Thierry Strudel3d639192016-09-09 11:52:26 -07007022 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7023 int64_t fwk_frame_number = *frame_number;
7024 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7025 }
7026
7027 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7028 int32_t fps_range[2];
7029 fps_range[0] = (int32_t)float_range->min_fps;
7030 fps_range[1] = (int32_t)float_range->max_fps;
7031 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7032 fps_range, 2);
7033 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7034 fps_range[0], fps_range[1]);
7035 }
7036
7037 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7038 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7039 }
7040
7041 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7042 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7043 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7044 *sceneMode);
7045 if (NAME_NOT_FOUND != val) {
7046 uint8_t fwkSceneMode = (uint8_t)val;
7047 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7048 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7049 fwkSceneMode);
7050 }
7051 }
7052
7053 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7054 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7055 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7056 }
7057
7058 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7059 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7060 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7061 }
7062
7063 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7064 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7065 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7066 }
7067
7068 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7069 CAM_INTF_META_EDGE_MODE, metadata) {
7070 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7071 }
7072
7073 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7074 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7075 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7076 }
7077
7078 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7079 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7080 }
7081
7082 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7083 if (0 <= *flashState) {
7084 uint8_t fwk_flashState = (uint8_t) *flashState;
7085 if (!gCamCapability[mCameraId]->flash_available) {
7086 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7087 }
7088 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7089 }
7090 }
7091
7092 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7093 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7094 if (NAME_NOT_FOUND != val) {
7095 uint8_t fwk_flashMode = (uint8_t)val;
7096 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7097 }
7098 }
7099
7100 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7101 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7102 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7103 }
7104
7105 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7106 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7107 }
7108
7109 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7110 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7111 }
7112
7113 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7114 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7115 }
7116
7117 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7118 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7119 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7120 }
7121
7122 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7123 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7124 LOGD("fwk_videoStab = %d", fwk_videoStab);
7125 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7126 } else {
7127 // Regardless of whether video stabilization is supported, CTS expects the EIS result
7128 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7129 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7130 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007131 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007132 }
7133
7134 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7135 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7136 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7137 }
7138
7139 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7140 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7141 }
7142
Thierry Strudel3d639192016-09-09 11:52:26 -07007143 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7144 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007145 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007146
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007147 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7148 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007149
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007150 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007151 blackLevelAppliedPattern->cam_black_level[0],
7152 blackLevelAppliedPattern->cam_black_level[1],
7153 blackLevelAppliedPattern->cam_black_level[2],
7154 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007155 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7156 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007157
7158#ifndef USE_HAL_3_3
7159 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307160 // Need to convert the internal 14-bit depth values to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007161 // bit-depth space.
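// (A 14-bit to 10-bit conversion is a shift of 4 bits, hence the divide by 16.0 below.)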
Jason Lee4f3d96e2017-02-28 19:24:14 +05307162 fwk_blackLevelInd[0] /= 16.0;
7163 fwk_blackLevelInd[1] /= 16.0;
7164 fwk_blackLevelInd[2] /= 16.0;
7165 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007166 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7167 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007168#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007169 }
7170
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007171#ifndef USE_HAL_3_3
7172 // Fixed whitelevel is used by ISP/Sensor
7173 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7174 &gCamCapability[mCameraId]->white_level, 1);
7175#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007176
7177 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7178 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7179 int32_t scalerCropRegion[4];
7180 scalerCropRegion[0] = hScalerCropRegion->left;
7181 scalerCropRegion[1] = hScalerCropRegion->top;
7182 scalerCropRegion[2] = hScalerCropRegion->width;
7183 scalerCropRegion[3] = hScalerCropRegion->height;
7184
7185 // Adjust crop region from sensor output coordinate system to active
7186 // array coordinate system.
7187 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7188 scalerCropRegion[2], scalerCropRegion[3]);
7189
7190 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7191 }
7192
7193 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7194 LOGD("sensorExpTime = %lld", *sensorExpTime);
7195 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7196 }
7197
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007198 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7199 LOGD("expTimeBoost = %f", *expTimeBoost);
7200 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7201 }
7202
Thierry Strudel3d639192016-09-09 11:52:26 -07007203 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7204 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7205 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7206 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7207 }
7208
7209 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7210 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7211 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7212 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7213 sensorRollingShutterSkew, 1);
7214 }
7215
7216 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7217 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7218 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7219
7220 //calculate the noise profile based on sensitivity
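// ANDROID_SENSOR_NOISE_PROFILE models a pixel's noise variance as a linear function of its
// signal level, N(x) = S * x + O, so every color channel is given the same (S, O) pair
// derived from the current sensitivity.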
7221 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7222 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7223 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7224 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7225 noise_profile[i] = noise_profile_S;
7226 noise_profile[i+1] = noise_profile_O;
7227 }
7228 LOGD("noise model entry (S, O) is (%f, %f)",
7229 noise_profile_S, noise_profile_O);
7230 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7231 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7232 }
7233
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007234#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007235 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007236 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007237 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007238 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007239 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7240 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7241 }
7242 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007243#endif
7244
Thierry Strudel3d639192016-09-09 11:52:26 -07007245 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7246 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7247 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7248 }
7249
7250 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7251 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7252 *faceDetectMode);
7253 if (NAME_NOT_FOUND != val) {
7254 uint8_t fwk_faceDetectMode = (uint8_t)val;
7255 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7256
7257 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7258 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7259 CAM_INTF_META_FACE_DETECTION, metadata) {
7260 uint8_t numFaces = MIN(
7261 faceDetectionInfo->num_faces_detected, MAX_ROI);
7262 int32_t faceIds[MAX_ROI];
7263 uint8_t faceScores[MAX_ROI];
7264 int32_t faceRectangles[MAX_ROI * 4];
7265 int32_t faceLandmarks[MAX_ROI * 6];
7266 size_t j = 0, k = 0;
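// j indexes the flattened face-rectangle array (4 values per face); k indexes the
// flattened landmark array (TOTAL_LANDMARK_INDICES values per face).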
7267
7268 for (size_t i = 0; i < numFaces; i++) {
7269 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7270 // Adjust crop region from sensor output coordinate system to active
7271 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007272 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007273 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7274 rect.width, rect.height);
7275
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007276 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007277
Jason Lee8ce36fa2017-04-19 19:40:37 -07007278 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7279 "bottom-right (%d, %d)",
7280 faceDetectionInfo->frame_id, i,
7281 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7282 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7283
Thierry Strudel3d639192016-09-09 11:52:26 -07007284 j+= 4;
7285 }
7286 if (numFaces <= 0) {
7287 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7288 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7289 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7290 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7291 }
7292
7293 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7294 numFaces);
7295 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7296 faceRectangles, numFaces * 4U);
7297 if (fwk_faceDetectMode ==
7298 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7299 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7300 CAM_INTF_META_FACE_LANDMARK, metadata) {
7301
7302 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007303 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007304 // Map the co-ordinate sensor output coordinate system to active
7305 // array coordinate system.
7306 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007307 face_landmarks.left_eye_center.x,
7308 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007309 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007310 face_landmarks.right_eye_center.x,
7311 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007312 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007313 face_landmarks.mouth_center.x,
7314 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007315
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007316 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007317
7318 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7319 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7320 faceDetectionInfo->frame_id, i,
7321 faceLandmarks[k + LEFT_EYE_X],
7322 faceLandmarks[k + LEFT_EYE_Y],
7323 faceLandmarks[k + RIGHT_EYE_X],
7324 faceLandmarks[k + RIGHT_EYE_Y],
7325 faceLandmarks[k + MOUTH_X],
7326 faceLandmarks[k + MOUTH_Y]);
7327
Thierry Strudel04e026f2016-10-10 11:27:36 -07007328 k+= TOTAL_LANDMARK_INDICES;
7329 }
7330 } else {
7331 for (size_t i = 0; i < numFaces; i++) {
7332 setInvalidLandmarks(faceLandmarks+k);
7333 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007334 }
7335 }
7336
Jason Lee49619db2017-04-13 12:07:22 -07007337 for (size_t i = 0; i < numFaces; i++) {
7338 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7339
7340 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7341 faceDetectionInfo->frame_id, i, faceIds[i]);
7342 }
7343
Thierry Strudel3d639192016-09-09 11:52:26 -07007344 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7345 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7346 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007347 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007348 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7349 CAM_INTF_META_FACE_BLINK, metadata) {
7350 uint8_t detected[MAX_ROI];
7351 uint8_t degree[MAX_ROI * 2];
7352 for (size_t i = 0; i < numFaces; i++) {
7353 detected[i] = blinks->blink[i].blink_detected;
7354 degree[2 * i] = blinks->blink[i].left_blink;
7355 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007356
Jason Lee49619db2017-04-13 12:07:22 -07007357 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7358 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7359 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7360 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007361 }
7362 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7363 detected, numFaces);
7364 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7365 degree, numFaces * 2);
7366 }
7367 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7368 CAM_INTF_META_FACE_SMILE, metadata) {
7369 uint8_t degree[MAX_ROI];
7370 uint8_t confidence[MAX_ROI];
7371 for (size_t i = 0; i < numFaces; i++) {
7372 degree[i] = smiles->smile[i].smile_degree;
7373 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007374
Jason Lee49619db2017-04-13 12:07:22 -07007375 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7376 "smile_degree=%d, smile_score=%d",
7377 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007378 }
7379 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7380 degree, numFaces);
7381 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7382 confidence, numFaces);
7383 }
7384 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7385 CAM_INTF_META_FACE_GAZE, metadata) {
7386 int8_t angle[MAX_ROI];
7387 int32_t direction[MAX_ROI * 3];
7388 int8_t degree[MAX_ROI * 2];
7389 for (size_t i = 0; i < numFaces; i++) {
7390 angle[i] = gazes->gaze[i].gaze_angle;
7391 direction[3 * i] = gazes->gaze[i].updown_dir;
7392 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7393 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7394 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7395 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007396
7397 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7398 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7399 "left_right_gaze=%d, top_bottom_gaze=%d",
7400 faceDetectionInfo->frame_id, i, angle[i],
7401 direction[3 * i], direction[3 * i + 1],
7402 direction[3 * i + 2],
7403 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007404 }
7405 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7406 (uint8_t *)angle, numFaces);
7407 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7408 direction, numFaces * 3);
7409 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7410 (uint8_t *)degree, numFaces * 2);
7411 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007412 }
7413 }
7414 }
7415 }
7416
7417 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7418 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007419 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007420 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007421 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007422
Shuzhen Wang14415f52016-11-16 18:26:18 -08007423 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7424 histogramBins = *histBins;
7425 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7426 }
7427
7428 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007429 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7430 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007431 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007432
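// Report a single channel's histogram: for Bayer stats pick the buffer matching the
// reported channel (defaulting to the R-channel buffer otherwise); YUV stats have only
// one buffer.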
7433 switch (stats_data->type) {
7434 case CAM_HISTOGRAM_TYPE_BAYER:
7435 switch (stats_data->bayer_stats.data_type) {
7436 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007437 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7438 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007439 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007440 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7441 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007442 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007443 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7444 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007445 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007446 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007447 case CAM_STATS_CHANNEL_R:
7448 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007449 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7450 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007451 }
7452 break;
7453 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007454 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007455 break;
7456 }
7457
Shuzhen Wang14415f52016-11-16 18:26:18 -08007458 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007459 }
7460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007461 }
7462
7463 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7464 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7465 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7466 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7467 }
7468
7469 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7470 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7471 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7472 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7473 }
7474
7475 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7476 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7477 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7478 CAM_MAX_SHADING_MAP_HEIGHT);
7479 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7480 CAM_MAX_SHADING_MAP_WIDTH);
7481 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7482 lensShadingMap->lens_shading, 4U * map_width * map_height);
7483 }
7484
7485 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7486 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7487 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7488 }
7489
7490 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7491 //Populate CAM_INTF_META_TONEMAP_CURVES
7492 /* ch0 = G, ch 1 = B, ch 2 = R*/
7493 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7494 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7495 tonemap->tonemap_points_cnt,
7496 CAM_MAX_TONEMAP_CURVE_SIZE);
7497 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7498 }
7499
7500 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7501 &tonemap->curves[0].tonemap_points[0][0],
7502 tonemap->tonemap_points_cnt * 2);
7503
7504 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7505 &tonemap->curves[1].tonemap_points[0][0],
7506 tonemap->tonemap_points_cnt * 2);
7507
7508 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7509 &tonemap->curves[2].tonemap_points[0][0],
7510 tonemap->tonemap_points_cnt * 2);
7511 }
7512
7513 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7514 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7515 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7516 CC_GAIN_MAX);
7517 }
7518
7519 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7520 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7521 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7522 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7523 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7524 }
7525
7526 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7527 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7528 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7529 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7530 toneCurve->tonemap_points_cnt,
7531 CAM_MAX_TONEMAP_CURVE_SIZE);
7532 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7533 }
7534 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7535 (float*)toneCurve->curve.tonemap_points,
7536 toneCurve->tonemap_points_cnt * 2);
7537 }
7538
7539 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7540 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7541 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7542 predColorCorrectionGains->gains, 4);
7543 }
7544
7545 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7546 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7547 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7548 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7549 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7550 }
7551
7552 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7553 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7554 }
7555
7556 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7557 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7558 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7559 }
7560
7561 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7562 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7563 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7564 }
7565
7566 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7567 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7568 *effectMode);
7569 if (NAME_NOT_FOUND != val) {
7570 uint8_t fwk_effectMode = (uint8_t)val;
7571 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7572 }
7573 }
7574
7575 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7576 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7577 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7578 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7579 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7580 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7581 }
7582 int32_t fwk_testPatternData[4];
7583 fwk_testPatternData[0] = testPatternData->r;
7584 fwk_testPatternData[3] = testPatternData->b;
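// fwk_testPatternData is ordered [R, G1, G2, B]; which green slot receives the backend's
// gr/gb value depends on the sensor's color filter arrangement, handled below.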
7585 switch (gCamCapability[mCameraId]->color_arrangement) {
7586 case CAM_FILTER_ARRANGEMENT_RGGB:
7587 case CAM_FILTER_ARRANGEMENT_GRBG:
7588 fwk_testPatternData[1] = testPatternData->gr;
7589 fwk_testPatternData[2] = testPatternData->gb;
7590 break;
7591 case CAM_FILTER_ARRANGEMENT_GBRG:
7592 case CAM_FILTER_ARRANGEMENT_BGGR:
7593 fwk_testPatternData[2] = testPatternData->gr;
7594 fwk_testPatternData[1] = testPatternData->gb;
7595 break;
7596 default:
7597 LOGE("color arrangement %d is not supported",
7598 gCamCapability[mCameraId]->color_arrangement);
7599 break;
7600 }
7601 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7602 }
7603
7604 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7605 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7606 }
7607
7608 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7609 String8 str((const char *)gps_methods);
7610 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7611 }
7612
7613 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7614 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7615 }
7616
7617 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7618 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7619 }
7620
7621 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7622 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7623 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7624 }
7625
7626 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7627 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7628 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7629 }
7630
7631 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7632 int32_t fwk_thumb_size[2];
7633 fwk_thumb_size[0] = thumb_size->width;
7634 fwk_thumb_size[1] = thumb_size->height;
7635 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7636 }
7637
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007638 // Skip reprocess metadata if there is no input stream.
7639 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7640 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7641 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7642 privateData,
7643 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007645 }
7646
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007647 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007648 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007649 meteringMode, 1);
7650 }
7651
Thierry Strudel54dc9782017-02-15 12:12:10 -08007652 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7653 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7654 LOGD("hdr_scene_data: %d %f\n",
7655 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7656 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7657 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7658 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7659 &isHdr, 1);
7660 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7661 &isHdrConfidence, 1);
7662 }
7663
7664
7665
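    // Tuning metadata blob layout: a uint32_t version, followed by five uint32_t
    // size fields (sensor, VFE, CPP, CAC, mod3), followed by the sensor/VFE/CPP/CAC
    // data segments copied from fixed offsets in tuning_params.data. The mod3
    // segment is always written with size 0 here.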
Thierry Strudel3d639192016-09-09 11:52:26 -07007666 if (metadata->is_tuning_params_valid) {
7667 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7668 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7669 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7670
7671
7672 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7673 sizeof(uint32_t));
7674 data += sizeof(uint32_t);
7675
7676 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7677 sizeof(uint32_t));
7678 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7679 data += sizeof(uint32_t);
7680
7681 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7682 sizeof(uint32_t));
7683 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7684 data += sizeof(uint32_t);
7685
7686 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7687 sizeof(uint32_t));
7688 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7689 data += sizeof(uint32_t);
7690
7691 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7692 sizeof(uint32_t));
7693 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7694 data += sizeof(uint32_t);
7695
7696 metadata->tuning_params.tuning_mod3_data_size = 0;
7697 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7698 sizeof(uint32_t));
7699 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7700 data += sizeof(uint32_t);
7701
7702 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7703 TUNING_SENSOR_DATA_MAX);
7704 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7705 count);
7706 data += count;
7707
7708 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7709 TUNING_VFE_DATA_MAX);
7710 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7711 count);
7712 data += count;
7713
7714 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7715 TUNING_CPP_DATA_MAX);
7716 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7717 count);
7718 data += count;
7719
7720 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7721 TUNING_CAC_DATA_MAX);
7722 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7723 count);
7724 data += count;
7725
7726 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7727 (int32_t *)(void *)tuning_meta_data_blob,
7728 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7729 }
7730
7731 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7732 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7733 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7734 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7735 NEUTRAL_COL_POINTS);
7736 }
7737
7738 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7739 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7740 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7741 }
7742
7743 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7744 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7745        // Adjust the AE region from the sensor output coordinate system to the
7746        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007747 cam_rect_t hAeRect = hAeRegions->rect;
7748 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7749 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007750
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007751 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007752 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7753 REGIONS_TUPLE_COUNT);
7754 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7755 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007756 hAeRect.left, hAeRect.top, hAeRect.width,
7757 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007758 }
7759
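    // The AF state may already have been delivered as an early partial result
    // (focusStateSent). Otherwise, prefer the AF state cached when a newer
    // trigger arrived (focusStateValid) over the state in this metadata buffer.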
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007760 if (!pendingRequest.focusStateSent) {
7761 if (pendingRequest.focusStateValid) {
7762 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7763 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007764 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007765 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7766 uint8_t fwk_afState = (uint8_t) *afState;
7767 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7768 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7769 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007770 }
7771 }
7772
Thierry Strudel3d639192016-09-09 11:52:26 -07007773 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7774 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7775 }
7776
7777 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7778 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7779 }
7780
7781 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7782 uint8_t fwk_lensState = *lensState;
7783 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7784 }
7785
Thierry Strudel3d639192016-09-09 11:52:26 -07007786 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007787 uint32_t ab_mode = *hal_ab_mode;
7788 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7789 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7790 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7791 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007792 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007793 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007794 if (NAME_NOT_FOUND != val) {
7795 uint8_t fwk_ab_mode = (uint8_t)val;
7796 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7797 }
7798 }
7799
7800 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7801 int val = lookupFwkName(SCENE_MODES_MAP,
7802 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7803 if (NAME_NOT_FOUND != val) {
7804 uint8_t fwkBestshotMode = (uint8_t)val;
7805 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7806 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7807 } else {
7808 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7809 }
7810 }
7811
7812 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7813 uint8_t fwk_mode = (uint8_t) *mode;
7814 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7815 }
7816
7817    /* Constant metadata values to be updated */
7818 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7819 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7820
7821 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7822 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7823
7824 int32_t hotPixelMap[2];
7825 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7826
7827 // CDS
7828 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7829 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7830 }
7831
Thierry Strudel04e026f2016-10-10 11:27:36 -07007832 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7833 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007834 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007835 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7836 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7837 } else {
7838 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7839 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007840
7841 if(fwk_hdr != curr_hdr_state) {
7842 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7843 if(fwk_hdr)
7844 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7845 else
7846 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7847 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007848 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7849 }
7850
Thierry Strudel54dc9782017-02-15 12:12:10 -08007851 //binning correction
7852 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7853 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7854 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7855 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7856 }
7857
Thierry Strudel04e026f2016-10-10 11:27:36 -07007858 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007859 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007860 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7861 int8_t is_ir_on = 0;
7862
7863 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7864 if(is_ir_on != curr_ir_state) {
7865 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7866 if(is_ir_on)
7867 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7868 else
7869 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7870 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007871 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007872 }
7873
Thierry Strudel269c81a2016-10-12 12:13:59 -07007874 // AEC SPEED
7875 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7876 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7877 }
7878
7879 // AWB SPEED
7880 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7881 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7882 }
7883
Thierry Strudel3d639192016-09-09 11:52:26 -07007884 // TNR
7885 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7886 uint8_t tnr_enable = tnr->denoise_enable;
7887 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007888 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7889 int8_t is_tnr_on = 0;
7890
7891 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7892 if(is_tnr_on != curr_tnr_state) {
7893 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7894 if(is_tnr_on)
7895 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7896 else
7897 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7898 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007899
7900 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7901 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7902 }
7903
7904 // Reprocess crop data
7905 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7906 uint8_t cnt = crop_data->num_of_streams;
7907 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7908 // mm-qcamera-daemon only posts crop_data for streams
7909                // not linked to pproc, so the absence of valid crop metadata is not
7910                // necessarily an error.
7911 LOGD("No valid crop metadata entries");
7912 } else {
7913 uint32_t reproc_stream_id;
7914 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7915 LOGD("No reprocessible stream found, ignore crop data");
7916 } else {
7917 int rc = NO_ERROR;
7918 Vector<int32_t> roi_map;
7919 int32_t *crop = new int32_t[cnt*4];
7920 if (NULL == crop) {
7921 rc = NO_MEMORY;
7922 }
7923 if (NO_ERROR == rc) {
7924 int32_t streams_found = 0;
7925 for (size_t i = 0; i < cnt; i++) {
7926 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7927 if (pprocDone) {
7928 // HAL already does internal reprocessing,
7929 // either via reprocessing before JPEG encoding,
7930 // or offline postprocessing for pproc bypass case.
7931 crop[0] = 0;
7932 crop[1] = 0;
7933 crop[2] = mInputStreamInfo.dim.width;
7934 crop[3] = mInputStreamInfo.dim.height;
7935 } else {
7936 crop[0] = crop_data->crop_info[i].crop.left;
7937 crop[1] = crop_data->crop_info[i].crop.top;
7938 crop[2] = crop_data->crop_info[i].crop.width;
7939 crop[3] = crop_data->crop_info[i].crop.height;
7940 }
7941 roi_map.add(crop_data->crop_info[i].roi_map.left);
7942 roi_map.add(crop_data->crop_info[i].roi_map.top);
7943 roi_map.add(crop_data->crop_info[i].roi_map.width);
7944 roi_map.add(crop_data->crop_info[i].roi_map.height);
7945 streams_found++;
7946 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7947 crop[0], crop[1], crop[2], crop[3]);
7948 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7949 crop_data->crop_info[i].roi_map.left,
7950 crop_data->crop_info[i].roi_map.top,
7951 crop_data->crop_info[i].roi_map.width,
7952 crop_data->crop_info[i].roi_map.height);
7953 break;
7954
7955 }
7956 }
7957 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7958 &streams_found, 1);
7959 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7960 crop, (size_t)(streams_found * 4));
7961 if (roi_map.array()) {
7962 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7963 roi_map.array(), roi_map.size());
7964 }
7965 }
7966 if (crop) {
7967 delete [] crop;
7968 }
7969 }
7970 }
7971 }
7972
7973 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7974        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7975        // so hardcode the CAC result to OFF mode.
7976 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7977 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7978 } else {
7979 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7980 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7981 *cacMode);
7982 if (NAME_NOT_FOUND != val) {
7983 uint8_t resultCacMode = (uint8_t)val;
7984                // Check whether the CAC result from the callback matches the framework-set CAC mode.
7985                // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007986 if (pendingRequest.fwkCacMode != resultCacMode) {
7987 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007988 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007989 //Check if CAC is disabled by property
7990 if (m_cacModeDisabled) {
7991 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7992 }
7993
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007994 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007995 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7996 } else {
7997 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7998 }
7999 }
8000 }
8001
8002 // Post blob of cam_cds_data through vendor tag.
8003 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8004 uint8_t cnt = cdsInfo->num_of_streams;
8005 cam_cds_data_t cdsDataOverride;
8006 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8007 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8008 cdsDataOverride.num_of_streams = 1;
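        // Collapse the per-stream CDS info into a single entry: only the CDS
        // setting of the reprocessible output stream (if one exists) is
        // reported to the framework, along with the session-wide enable flag.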
8009 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8010 uint32_t reproc_stream_id;
8011 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8012 LOGD("No reprocessible stream found, ignore cds data");
8013 } else {
8014 for (size_t i = 0; i < cnt; i++) {
8015 if (cdsInfo->cds_info[i].stream_id ==
8016 reproc_stream_id) {
8017 cdsDataOverride.cds_info[0].cds_enable =
8018 cdsInfo->cds_info[i].cds_enable;
8019 break;
8020 }
8021 }
8022 }
8023 } else {
8024 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8025 }
8026 camMetadata.update(QCAMERA3_CDS_INFO,
8027 (uint8_t *)&cdsDataOverride,
8028 sizeof(cam_cds_data_t));
8029 }
8030
8031 // Ldaf calibration data
8032 if (!mLdafCalibExist) {
8033 IF_META_AVAILABLE(uint32_t, ldafCalib,
8034 CAM_INTF_META_LDAF_EXIF, metadata) {
8035 mLdafCalibExist = true;
8036 mLdafCalib[0] = ldafCalib[0];
8037 mLdafCalib[1] = ldafCalib[1];
8038 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8039 ldafCalib[0], ldafCalib[1]);
8040 }
8041 }
8042
Thierry Strudel54dc9782017-02-15 12:12:10 -08008043 // EXIF debug data through vendor tag
8044 /*
8045 * Mobicat Mask can assume 3 values:
8046 * 1 refers to Mobicat data,
8047 * 2 refers to Stats Debug and Exif Debug Data
8048 * 3 refers to Mobicat and Stats Debug Data
8049 * We want to make sure that we are sending Exif debug data
8050 * only when Mobicat Mask is 2.
8051 */
8052 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8053 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8054 (uint8_t *)(void *)mExifParams.debug_params,
8055 sizeof(mm_jpeg_debug_exif_params_t));
8056 }
8057
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008058 // Reprocess and DDM debug data through vendor tag
8059 cam_reprocess_info_t repro_info;
8060 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008061 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8062 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008063 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008064 }
8065 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8066 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008067 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008068 }
8069 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8070 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008071 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008072 }
8073 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8074 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008075 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008076 }
8077 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8078 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008079 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008080 }
8081 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008082 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008083 }
8084 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8085 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008086 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008087 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008088 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8089 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8090 }
8091 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8092 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8093 }
8094 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8095 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008096
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008097 // INSTANT AEC MODE
8098 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8099 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8100 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8101 }
8102
Shuzhen Wange763e802016-03-31 10:24:29 -07008103 // AF scene change
8104 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8105 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8106 }
8107
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008108 // Enable ZSL
8109 if (enableZsl != nullptr) {
8110 uint8_t value = *enableZsl ?
8111 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8112 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8113 }
8114
Xu Han821ea9c2017-05-23 09:00:40 -07008115 // OIS Data
8116 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8117 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8118 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8119 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8120 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8121 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8122 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8123 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8124 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8125 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8126 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008127 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8128 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8129 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8130 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008131 }
8132
Thierry Strudel3d639192016-09-09 11:52:26 -07008133 resultMetadata = camMetadata.release();
8134 return resultMetadata;
8135}
8136
8137/*===========================================================================
8138 * FUNCTION : saveExifParams
8139 *
8140 * DESCRIPTION: Cache 3A and stats EXIF debug parameters from the metadata
 *              callback into mExifParams for later EXIF debug data use.
8141 *
8142 * PARAMETERS :
8143 * @metadata : metadata information from callback
8144 *
8145 * RETURN : none
8146 *
8147 *==========================================================================*/
8148void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8149{
8150 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8151 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8152 if (mExifParams.debug_params) {
8153 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8154 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8155 }
8156 }
8157 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8158 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8159 if (mExifParams.debug_params) {
8160 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8161 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8162 }
8163 }
8164 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8165 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8166 if (mExifParams.debug_params) {
8167 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8168 mExifParams.debug_params->af_debug_params_valid = TRUE;
8169 }
8170 }
8171 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8172 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8173 if (mExifParams.debug_params) {
8174 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8175 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8176 }
8177 }
8178 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8179 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8180 if (mExifParams.debug_params) {
8181 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8182 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8183 }
8184 }
8185 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8186 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8187 if (mExifParams.debug_params) {
8188 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8189 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8190 }
8191 }
8192 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8193 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8194 if (mExifParams.debug_params) {
8195 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8196 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8197 }
8198 }
8199 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8200 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8201 if (mExifParams.debug_params) {
8202 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8203 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8204 }
8205 }
8206}
8207
8208/*===========================================================================
8209 * FUNCTION : get3AExifParams
8210 *
8211 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data.
8212 *
8213 * PARAMETERS : none
8214 *
8215 *
8216 * RETURN : mm_jpeg_exif_params_t
8217 *
8218 *==========================================================================*/
8219mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8220{
8221 return mExifParams;
8222}
8223
8224/*===========================================================================
8225 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8226 *
8227 * DESCRIPTION:
8228 * DESCRIPTION: Translate urgent (partial result) metadata from the backend
 *              format into framework result metadata.
8229 * PARAMETERS :
8230 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008231 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8232 * urgent metadata in a batch. Always true for
8233 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008234 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008235 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8236 * i.e. even though it doesn't map to a valid partial
8237 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008238 * RETURN : camera_metadata_t*
8239 * metadata in a format specified by fwk
8240 *==========================================================================*/
8241camera_metadata_t*
8242QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008243 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008244 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008245{
8246 CameraMetadata camMetadata;
8247 camera_metadata_t *resultMetadata;
8248
Shuzhen Wang485e2442017-08-02 12:21:08 -07008249 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008250 /* In batch mode, use empty metadata if this is not the last in batch
8251 */
8252 resultMetadata = allocate_camera_metadata(0, 0);
8253 return resultMetadata;
8254 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008255
8256 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8257 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8258 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8259 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8260 }
8261
8262 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8263 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8264 &aecTrigger->trigger, 1);
8265 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8266 &aecTrigger->trigger_id, 1);
8267 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8268 aecTrigger->trigger);
8269 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8270 aecTrigger->trigger_id);
8271 }
8272
8273 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8274 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8275 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8276 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8277 }
8278
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008279 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8280 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8281 if (NAME_NOT_FOUND != val) {
8282 uint8_t fwkAfMode = (uint8_t)val;
8283 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8284 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8285 } else {
8286 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8287 val);
8288 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008289 }
8290
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008291 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8292 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8293 af_trigger->trigger);
8294 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8295 af_trigger->trigger_id);
8296
8297 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8298 mAfTrigger = *af_trigger;
8299 uint32_t fwk_AfState = (uint32_t) *afState;
8300
8301 // If this is the result for a new trigger, check if there is new early
8302 // af state. If there is, use the last af state for all results
8303 // preceding current partial frame number.
8304 for (auto & pendingRequest : mPendingRequestsList) {
8305 if (pendingRequest.frame_number < frame_number) {
8306 pendingRequest.focusStateValid = true;
8307 pendingRequest.focusState = fwk_AfState;
8308 } else if (pendingRequest.frame_number == frame_number) {
8309 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8310 // Check if early AF state for trigger exists. If yes, send AF state as
8311 // partial result for better latency.
8312 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8313 pendingRequest.focusStateSent = true;
8314 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8315 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8316 frame_number, fwkEarlyAfState);
8317 }
8318 }
8319 }
8320 }
8321 }
8322 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8323 &mAfTrigger.trigger, 1);
8324 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8325
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008326 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8327 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008328 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008329 int32_t afRegions[REGIONS_TUPLE_COUNT];
8330        // Adjust the AF region from the sensor output coordinate system to the
8331        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008332 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8333 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008334
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008335 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008336 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8337 REGIONS_TUPLE_COUNT);
8338 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8339 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008340 hAfRect.left, hAfRect.top, hAfRect.width,
8341 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008342 }
8343
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008344 // AF region confidence
8345 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8346 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8347 }
8348
Thierry Strudel3d639192016-09-09 11:52:26 -07008349 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8350 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8351 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8352 if (NAME_NOT_FOUND != val) {
8353 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8354 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8355 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8356 } else {
8357 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8358 }
8359 }
8360
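    // ANDROID_CONTROL_AE_MODE is deduced from the red-eye reduction setting,
    // the LED flash mode and the backend AE mode, in that order of precedence.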
8361 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8362 uint32_t aeMode = CAM_AE_MODE_MAX;
8363 int32_t flashMode = CAM_FLASH_MODE_MAX;
8364 int32_t redeye = -1;
8365 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8366 aeMode = *pAeMode;
8367 }
8368 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8369 flashMode = *pFlashMode;
8370 }
8371 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8372 redeye = *pRedeye;
8373 }
8374
8375 if (1 == redeye) {
8376 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8377 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8378 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8379 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8380 flashMode);
8381 if (NAME_NOT_FOUND != val) {
8382 fwk_aeMode = (uint8_t)val;
8383 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8384 } else {
8385 LOGE("Unsupported flash mode %d", flashMode);
8386 }
8387 } else if (aeMode == CAM_AE_MODE_ON) {
8388 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8389 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8390 } else if (aeMode == CAM_AE_MODE_OFF) {
8391 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8392 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008393 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8394 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8395 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008396 } else {
8397 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8398 "flashMode:%d, aeMode:%u!!!",
8399 redeye, flashMode, aeMode);
8400 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008401 if (mInstantAEC) {
8402        // Increment the frame index count until the bound is reached for instant AEC.
8403 mInstantAecFrameIdxCount++;
8404 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8405 CAM_INTF_META_AEC_INFO, metadata) {
8406 LOGH("ae_params->settled = %d",ae_params->settled);
8407 // If AEC settled, or if number of frames reached bound value,
8408 // should reset instant AEC.
8409 if (ae_params->settled ||
8410 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8411 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8412 mInstantAEC = false;
8413 mResetInstantAEC = true;
8414 mInstantAecFrameIdxCount = 0;
8415 }
8416 }
8417 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008418 resultMetadata = camMetadata.release();
8419 return resultMetadata;
8420}
8421
8422/*===========================================================================
8423 * FUNCTION : dumpMetadataToFile
8424 *
8425 * DESCRIPTION: Dumps tuning metadata to file system
8426 *
8427 * PARAMETERS :
8428 * @meta : tuning metadata
8429 * @dumpFrameCount : current dump frame count
8430 *   @enabled : Enable mask
 *   @type : string identifying the dump type, used in the dump file name
 *   @frameNumber : frame number of the dumped metadata, used in the dump file name
8431 *
8432 *==========================================================================*/
8433void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8434 uint32_t &dumpFrameCount,
8435 bool enabled,
8436 const char *type,
8437 uint32_t frameNumber)
8438{
8439 //Some sanity checks
8440 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8441 LOGE("Tuning sensor data size bigger than expected %d: %d",
8442 meta.tuning_sensor_data_size,
8443 TUNING_SENSOR_DATA_MAX);
8444 return;
8445 }
8446
8447 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8448 LOGE("Tuning VFE data size bigger than expected %d: %d",
8449 meta.tuning_vfe_data_size,
8450 TUNING_VFE_DATA_MAX);
8451 return;
8452 }
8453
8454 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8455 LOGE("Tuning CPP data size bigger than expected %d: %d",
8456 meta.tuning_cpp_data_size,
8457 TUNING_CPP_DATA_MAX);
8458 return;
8459 }
8460
8461 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8462 LOGE("Tuning CAC data size bigger than expected %d: %d",
8463 meta.tuning_cac_data_size,
8464 TUNING_CAC_DATA_MAX);
8465 return;
8466 }
8467 //
8468
8469 if(enabled){
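        // The dump file mirrors the tuning blob layout: a version word, five size
        // words (sensor, VFE, CPP, CAC, mod3), then the sensor/VFE/CPP/CAC data segments.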
8470 char timeBuf[FILENAME_MAX];
8471 char buf[FILENAME_MAX];
8472 memset(buf, 0, sizeof(buf));
8473 memset(timeBuf, 0, sizeof(timeBuf));
8474 time_t current_time;
8475 struct tm * timeinfo;
8476 time (&current_time);
8477 timeinfo = localtime (&current_time);
8478 if (timeinfo != NULL) {
8479 strftime (timeBuf, sizeof(timeBuf),
8480 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8481 }
8482 String8 filePath(timeBuf);
8483 snprintf(buf,
8484 sizeof(buf),
8485 "%dm_%s_%d.bin",
8486 dumpFrameCount,
8487 type,
8488 frameNumber);
8489 filePath.append(buf);
8490 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8491 if (file_fd >= 0) {
8492 ssize_t written_len = 0;
8493 meta.tuning_data_version = TUNING_DATA_VERSION;
8494 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8495 written_len += write(file_fd, data, sizeof(uint32_t));
8496 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8497 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8498 written_len += write(file_fd, data, sizeof(uint32_t));
8499 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8500 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8501 written_len += write(file_fd, data, sizeof(uint32_t));
8502 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8503 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8504 written_len += write(file_fd, data, sizeof(uint32_t));
8505 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8506 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8507 written_len += write(file_fd, data, sizeof(uint32_t));
8508 meta.tuning_mod3_data_size = 0;
8509 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8510 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8511 written_len += write(file_fd, data, sizeof(uint32_t));
8512 size_t total_size = meta.tuning_sensor_data_size;
8513 data = (void *)((uint8_t *)&meta.data);
8514 written_len += write(file_fd, data, total_size);
8515 total_size = meta.tuning_vfe_data_size;
8516 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8517 written_len += write(file_fd, data, total_size);
8518 total_size = meta.tuning_cpp_data_size;
8519 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8520 written_len += write(file_fd, data, total_size);
8521 total_size = meta.tuning_cac_data_size;
8522 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8523 written_len += write(file_fd, data, total_size);
8524 close(file_fd);
8525 }else {
8526        } else {
8527            LOGE("failed to open file for metadata dumping");
8528 }
8529}
8530
8531/*===========================================================================
8532 * FUNCTION : cleanAndSortStreamInfo
8533 *
8534 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8535 * and sort them such that raw stream is at the end of the list
8536 *              and sort them such that raw streams are at the end of the list.
8537 *              This is a workaround for a camera daemon constraint.
8538 * PARAMETERS : None
8539 *
8540 *==========================================================================*/
8541void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8542{
8543 List<stream_info_t *> newStreamInfo;
8544
8545 /*clean up invalid streams*/
8546 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8547 it != mStreamInfo.end();) {
8548 if(((*it)->status) == INVALID){
8549 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8550 delete channel;
8551 free(*it);
8552 it = mStreamInfo.erase(it);
8553 } else {
8554 it++;
8555 }
8556 }
8557
8558 // Move preview/video/callback/snapshot streams into newList
8559 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8560 it != mStreamInfo.end();) {
8561 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8562 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8563 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8564 newStreamInfo.push_back(*it);
8565 it = mStreamInfo.erase(it);
8566 } else
8567 it++;
8568 }
8569 // Move raw streams into newList
8570 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8571 it != mStreamInfo.end();) {
8572 newStreamInfo.push_back(*it);
8573 it = mStreamInfo.erase(it);
8574 }
8575
8576 mStreamInfo = newStreamInfo;
8577}
8578
8579/*===========================================================================
8580 * FUNCTION : extractJpegMetadata
8581 *
8582 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8583 * JPEG metadata is cached in HAL, and return as part of capture
8584 * result when metadata is returned from camera daemon.
8585 *
8586 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8587 * @request: capture request
8588 *
8589 *==========================================================================*/
8590void QCamera3HardwareInterface::extractJpegMetadata(
8591 CameraMetadata& jpegMetadata,
8592 const camera3_capture_request_t *request)
8593{
8594 CameraMetadata frame_settings;
8595 frame_settings = request->settings;
8596
8597 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8598 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8599 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8600 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8601
8602 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8603 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8604 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8605 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8606
8607 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8608 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8609 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8610 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8611
8612 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8613 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8614 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8615 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8616
8617 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8618 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8619 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8620 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8621
8622 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8623 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8624 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8625 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8626
8627 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8628 int32_t thumbnail_size[2];
8629 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8630 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8631 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8632 int32_t orientation =
8633 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008634 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008635 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8636 int32_t temp;
8637 temp = thumbnail_size[0];
8638 thumbnail_size[0] = thumbnail_size[1];
8639 thumbnail_size[1] = temp;
8640 }
8641 }
8642 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8643 thumbnail_size,
8644 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8645 }
8646
8647}
8648
8649/*===========================================================================
8650 * FUNCTION : convertToRegions
8651 *
8652 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8653 *
8654 * PARAMETERS :
8655 * @rect : cam_rect_t struct to convert
8656 * @region : int32_t destination array
8657 * @weight : if we are converting from cam_area_t, weight is valid
8658 * else weight = -1
8659 *
8660 *==========================================================================*/
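// Example (assuming the FACE_* indices are in left/top/right/bottom/weight order):
// a cam_rect_t {left=100, top=200, width=50, height=60} with weight 1 is written
// out as {100, 200, 150, 260, 1}.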
8661void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8662 int32_t *region, int weight)
8663{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008664 region[FACE_LEFT] = rect.left;
8665 region[FACE_TOP] = rect.top;
8666 region[FACE_RIGHT] = rect.left + rect.width;
8667 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008668 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008669 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008670 }
8671}
8672
8673/*===========================================================================
8674 * FUNCTION : convertFromRegions
8675 *
8676 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8677 *
8678 * PARAMETERS :
8679 *   @roi : cam_area_t destination struct
8680 *   @frame_settings : capture request settings containing the region entry
8681 *   @tag : metadata tag whose entry holds the region as
8682 *          [xMin, yMin, xMax, yMax, weight]
8683 *
8684 *==========================================================================*/
8685void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008686 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008687{
Thierry Strudel3d639192016-09-09 11:52:26 -07008688 int32_t x_min = frame_settings.find(tag).data.i32[0];
8689 int32_t y_min = frame_settings.find(tag).data.i32[1];
8690 int32_t x_max = frame_settings.find(tag).data.i32[2];
8691 int32_t y_max = frame_settings.find(tag).data.i32[3];
8692 roi.weight = frame_settings.find(tag).data.i32[4];
8693 roi.rect.left = x_min;
8694 roi.rect.top = y_min;
8695 roi.rect.width = x_max - x_min;
8696 roi.rect.height = y_max - y_min;
8697}
8698
8699/*===========================================================================
8700 * FUNCTION : resetIfNeededROI
8701 *
8702 * DESCRIPTION: helper method to validate the roi against the scaler crop region
8703 *              and clamp it to lie within; returns false if the roi is entirely outside
8704 *
8705 * PARAMETERS :
8706 * @roi : cam_area_t struct to resize
8707 * @scalerCropRegion : cam_crop_region_t region to compare against
8708 *
8709 *
8710 *==========================================================================*/
8711bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8712 const cam_crop_region_t* scalerCropRegion)
8713{
8714 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8715 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8716 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8717 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8718
8719    /* According to the spec, weight = 0 indicates the roi should be disabled.
8720     * Without this check, the validation below (whether the roi lies inside the
8721     * scaler crop region) would fail, the roi would not be reset, and the
8722     * algorithm would keep using a stale roi window.
8723 */
8724 if (roi->weight == 0) {
8725 return true;
8726 }
8727
8728 if ((roi_x_max < scalerCropRegion->left) ||
8729 // right edge of roi window is left of scalar crop's left edge
8730 (roi_y_max < scalerCropRegion->top) ||
8731 // bottom edge of roi window is above scalar crop's top edge
8732 (roi->rect.left > crop_x_max) ||
8733            // left edge of roi window is beyond (to the right of) scalar crop's right edge
8734 (roi->rect.top > crop_y_max)){
8735            // top edge of roi window is below scalar crop's bottom edge
8736 return false;
8737 }
8738 if (roi->rect.left < scalerCropRegion->left) {
8739 roi->rect.left = scalerCropRegion->left;
8740 }
8741 if (roi->rect.top < scalerCropRegion->top) {
8742 roi->rect.top = scalerCropRegion->top;
8743 }
8744 if (roi_x_max > crop_x_max) {
8745 roi_x_max = crop_x_max;
8746 }
8747 if (roi_y_max > crop_y_max) {
8748 roi_y_max = crop_y_max;
8749 }
8750 roi->rect.width = roi_x_max - roi->rect.left;
8751 roi->rect.height = roi_y_max - roi->rect.top;
8752 return true;
8753}
8754
8755/*===========================================================================
8756 * FUNCTION : convertLandmarks
8757 *
8758 * DESCRIPTION: helper method to extract the landmarks from face detection info
8759 *
8760 * PARAMETERS :
8761 * @landmark_data : input landmark data to be converted
8762 * @landmarks : int32_t destination array
8763 *
8764 *
8765 *==========================================================================*/
8766void QCamera3HardwareInterface::convertLandmarks(
8767 cam_face_landmarks_info_t landmark_data,
8768 int32_t *landmarks)
8769{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008770 if (landmark_data.is_left_eye_valid) {
8771 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8772 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8773 } else {
8774 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8775 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8776 }
8777
8778 if (landmark_data.is_right_eye_valid) {
8779 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8780 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8781 } else {
8782 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8783 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8784 }
8785
8786 if (landmark_data.is_mouth_valid) {
8787 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8788 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8789 } else {
8790 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8791 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8792 }
8793}
8794
8795/*===========================================================================
8796 * FUNCTION : setInvalidLandmarks
8797 *
8798 * DESCRIPTION: helper method to set invalid landmarks
8799 *
8800 * PARAMETERS :
8801 * @landmarks : int32_t destination array
8802 *
8803 *
8804 *==========================================================================*/
8805void QCamera3HardwareInterface::setInvalidLandmarks(
8806 int32_t *landmarks)
8807{
8808 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8809 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8810 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8811 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8812 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8813 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008814}
8815
8816#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008817
8818/*===========================================================================
8819 * FUNCTION : getCapabilities
8820 *
8821 * DESCRIPTION: query camera capability from back-end
8822 *
8823 * PARAMETERS :
8824 * @ops : mm-interface ops structure
8825 * @cam_handle : camera handle for which we need capability
8826 *
8827 * RETURN : ptr type of capability structure
8828 * capability for success
8829 * NULL for failure
8830 *==========================================================================*/
8831cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8832 uint32_t cam_handle)
8833{
8834 int rc = NO_ERROR;
8835 QCamera3HeapMemory *capabilityHeap = NULL;
8836 cam_capability_t *cap_ptr = NULL;
8837
8838 if (ops == NULL) {
8839 LOGE("Invalid arguments");
8840 return NULL;
8841 }
8842
8843 capabilityHeap = new QCamera3HeapMemory(1);
8844 if (capabilityHeap == NULL) {
8845 LOGE("creation of capabilityHeap failed");
8846 return NULL;
8847 }
8848
8849 /* Allocate memory for capability buffer */
8850 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8851 if(rc != OK) {
8852        LOGE("No memory for capability");
8853 goto allocate_failed;
8854 }
8855
8856 /* Map memory for capability buffer */
8857 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8858
8859 rc = ops->map_buf(cam_handle,
8860 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8861 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8862 if(rc < 0) {
8863 LOGE("failed to map capability buffer");
8864 rc = FAILED_TRANSACTION;
8865 goto map_failed;
8866 }
8867
8868 /* Query Capability */
8869 rc = ops->query_capability(cam_handle);
8870 if(rc < 0) {
8871 LOGE("failed to query capability");
8872 rc = FAILED_TRANSACTION;
8873 goto query_failed;
8874 }
8875
8876 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8877 if (cap_ptr == NULL) {
8878 LOGE("out of memory");
8879 rc = NO_MEMORY;
8880 goto query_failed;
8881 }
8882
8883 memset(cap_ptr, 0, sizeof(cam_capability_t));
8884 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8885
8886 int index;
8887 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8888 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8889 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8890 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8891 }
8892
8893query_failed:
8894 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8895map_failed:
8896 capabilityHeap->deallocate();
8897allocate_failed:
8898 delete capabilityHeap;
8899
8900 if (rc != NO_ERROR) {
8901 return NULL;
8902 } else {
8903 return cap_ptr;
8904 }
8905}
8906
Thierry Strudel3d639192016-09-09 11:52:26 -07008907/*===========================================================================
8908 * FUNCTION : initCapabilities
8909 *
8910 * DESCRIPTION: initialize camera capabilities in static data struct
8911 *
8912 * PARAMETERS :
8913 * @cameraId : camera Id
8914 *
8915 * RETURN : int32_t type of status
8916 * NO_ERROR -- success
8917 * none-zero failure code
8918 *==========================================================================*/
8919int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8920{
8921 int rc = 0;
8922 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008923 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008924
8925 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8926 if (rc) {
8927 LOGE("camera_open failed. rc = %d", rc);
8928 goto open_failed;
8929 }
8930 if (!cameraHandle) {
8931 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8932 goto open_failed;
8933 }
8934
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008935 handle = get_main_camera_handle(cameraHandle->camera_handle);
8936 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8937 if (gCamCapability[cameraId] == NULL) {
8938 rc = FAILED_TRANSACTION;
8939 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008940 }
8941
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008942 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008943 if (is_dual_camera_by_idx(cameraId)) {
8944 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8945 gCamCapability[cameraId]->aux_cam_cap =
8946 getCapabilities(cameraHandle->ops, handle);
8947 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8948 rc = FAILED_TRANSACTION;
8949 free(gCamCapability[cameraId]);
8950 goto failed_op;
8951 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008952
8953 // Copy the main camera capability to main_cam_cap struct
8954 gCamCapability[cameraId]->main_cam_cap =
8955 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8956 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8957 LOGE("out of memory");
8958 rc = NO_MEMORY;
8959 goto failed_op;
8960 }
8961 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8962 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008963 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008964failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008965 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8966 cameraHandle = NULL;
8967open_failed:
8968 return rc;
8969}
8970
8971/*==========================================================================
 8972 * FUNCTION : get3AVersion
8973 *
8974 * DESCRIPTION: get the Q3A S/W version
8975 *
8976 * PARAMETERS :
8977 * @sw_version: Reference of Q3A structure which will hold version info upon
8978 * return
8979 *
8980 * RETURN : None
8981 *
8982 *==========================================================================*/
8983void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8984{
8985 if(gCamCapability[mCameraId])
8986 sw_version = gCamCapability[mCameraId]->q3a_version;
8987 else
8988 LOGE("Capability structure NULL!");
8989}
8990
8991
8992/*===========================================================================
8993 * FUNCTION : initParameters
8994 *
8995 * DESCRIPTION: initialize camera parameters
8996 *
8997 * PARAMETERS :
8998 *
8999 * RETURN : int32_t type of status
9000 * NO_ERROR -- success
 9001 * non-zero failure code
9002 *==========================================================================*/
9003int QCamera3HardwareInterface::initParameters()
9004{
9005 int rc = 0;
9006
9007 //Allocate Set Param Buffer
9008 mParamHeap = new QCamera3HeapMemory(1);
9009 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9010 if(rc != OK) {
9011 rc = NO_MEMORY;
9012 LOGE("Failed to allocate SETPARM Heap memory");
9013 delete mParamHeap;
9014 mParamHeap = NULL;
9015 return rc;
9016 }
9017
9018 //Map memory for parameters buffer
9019 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9020 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9021 mParamHeap->getFd(0),
9022 sizeof(metadata_buffer_t),
9023 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9024 if(rc < 0) {
9025 LOGE("failed to map SETPARM buffer");
9026 rc = FAILED_TRANSACTION;
9027 mParamHeap->deallocate();
9028 delete mParamHeap;
9029 mParamHeap = NULL;
9030 return rc;
9031 }
9032
9033 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9034
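    // mPrevParameters is a plain heap copy (not mapped to the camera backend);
    // it presumably caches the last applied settings so later requests can be
    // diffed against them instead of re-sending every parameter.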
9035 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9036 return rc;
9037}
9038
9039/*===========================================================================
9040 * FUNCTION : deinitParameters
9041 *
9042 * DESCRIPTION: de-initialize camera parameters
9043 *
9044 * PARAMETERS :
9045 *
9046 * RETURN : NONE
9047 *==========================================================================*/
9048void QCamera3HardwareInterface::deinitParameters()
9049{
9050 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9051 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9052
9053 mParamHeap->deallocate();
9054 delete mParamHeap;
9055 mParamHeap = NULL;
9056
9057 mParameters = NULL;
9058
9059 free(mPrevParameters);
9060 mPrevParameters = NULL;
9061}
9062
9063/*===========================================================================
9064 * FUNCTION : calcMaxJpegSize
9065 *
9066 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9067 *
9068 * PARAMETERS :
9069 *
9070 * RETURN : max_jpeg_size
9071 *==========================================================================*/
9072size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9073{
9074 size_t max_jpeg_size = 0;
9075 size_t temp_width, temp_height;
9076 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9077 MAX_SIZES_CNT);
9078 for (size_t i = 0; i < count; i++) {
9079 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9080 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9081 if (temp_width * temp_height > max_jpeg_size ) {
9082 max_jpeg_size = temp_width * temp_height;
9083 }
9084 }
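    // Size the JPEG buffer for the worst case: up to 1.5 bytes per pixel of the
    // largest picture size, plus the trailing camera3_jpeg_blob_t header that
    // terminates HAL3 JPEG buffers.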
9085 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9086 return max_jpeg_size;
9087}
9088
9089/*===========================================================================
9090 * FUNCTION : getMaxRawSize
9091 *
9092 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9093 *
9094 * PARAMETERS :
9095 *
9096 * RETURN : Largest supported Raw Dimension
9097 *==========================================================================*/
9098cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9099{
9100 int max_width = 0;
9101 cam_dimension_t maxRawSize;
9102
9103 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9104 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9105 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9106 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9107 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9108 }
9109 }
9110 return maxRawSize;
9111}
9112
9113
9114/*===========================================================================
9115 * FUNCTION : calcMaxJpegDim
9116 *
9117 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9118 *
9119 * PARAMETERS :
9120 *
9121 * RETURN : max_jpeg_dim
9122 *==========================================================================*/
9123cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9124{
9125 cam_dimension_t max_jpeg_dim;
9126 cam_dimension_t curr_jpeg_dim;
9127 max_jpeg_dim.width = 0;
9128 max_jpeg_dim.height = 0;
9129 curr_jpeg_dim.width = 0;
9130 curr_jpeg_dim.height = 0;
9131 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9132 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9133 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9134 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9135 max_jpeg_dim.width * max_jpeg_dim.height ) {
9136 max_jpeg_dim.width = curr_jpeg_dim.width;
9137 max_jpeg_dim.height = curr_jpeg_dim.height;
9138 }
9139 }
9140 return max_jpeg_dim;
9141}
9142
9143/*===========================================================================
9144 * FUNCTION : addStreamConfig
9145 *
9146 * DESCRIPTION: adds the stream configuration to the array
9147 *
9148 * PARAMETERS :
9149 * @available_stream_configs : pointer to stream configuration array
9150 * @scalar_format : scalar format
9151 * @dim : configuration dimension
9152 * @config_type : input or output configuration type
9153 *
9154 * RETURN : NONE
9155 *==========================================================================*/
9156void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9157 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9158{
9159 available_stream_configs.add(scalar_format);
9160 available_stream_configs.add(dim.width);
9161 available_stream_configs.add(dim.height);
9162 available_stream_configs.add(config_type);
9163}
9164
9165/*===========================================================================
 9166 * FUNCTION : supportBurstCapture
9167 *
9168 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9169 *
9170 * PARAMETERS :
9171 * @cameraId : camera Id
9172 *
9173 * RETURN : true if camera supports BURST_CAPTURE
9174 * false otherwise
9175 *==========================================================================*/
9176bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9177{
9178 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9179 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9180 const int32_t highResWidth = 3264;
9181 const int32_t highResHeight = 2448;
9182
9183 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
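    // These thresholds mirror the BURST_CAPTURE requirements: roughly 8 MP
    // (3264x2448) output at 20 fps or better, and at least 10 fps at the
    // sensor's maximum resolution.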
9184 // Maximum resolution images cannot be captured at >= 10fps
9185 // -> not supporting BURST_CAPTURE
9186 return false;
9187 }
9188
9189 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9190 // Maximum resolution images can be captured at >= 20fps
9191 // --> supporting BURST_CAPTURE
9192 return true;
9193 }
9194
9195 // Find the smallest highRes resolution, or largest resolution if there is none
9196 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9197 MAX_SIZES_CNT);
9198 size_t highRes = 0;
9199 while ((highRes + 1 < totalCnt) &&
9200 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9201 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9202 highResWidth * highResHeight)) {
9203 highRes++;
9204 }
9205 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9206 return true;
9207 } else {
9208 return false;
9209 }
9210}
9211
9212/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009213 * FUNCTION : getPDStatIndex
9214 *
9215 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9216 *
9217 * PARAMETERS :
9218 * @caps : camera capabilities
9219 *
9220 * RETURN : int32_t type
9221 * non-negative - on success
9222 * -1 - on failure
9223 *==========================================================================*/
9224int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9225 if (nullptr == caps) {
9226 return -1;
9227 }
9228
9229 uint32_t metaRawCount = caps->meta_raw_channel_count;
9230 int32_t ret = -1;
9231 for (size_t i = 0; i < metaRawCount; i++) {
9232 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9233 ret = i;
9234 break;
9235 }
9236 }
9237
9238 return ret;
9239}
9240
9241/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009242 * FUNCTION : initStaticMetadata
9243 *
9244 * DESCRIPTION: initialize the static metadata
9245 *
9246 * PARAMETERS :
9247 * @cameraId : camera Id
9248 *
9249 * RETURN : int32_t type of status
9250 * 0 -- success
9251 * non-zero failure code
9252 *==========================================================================*/
9253int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9254{
9255 int rc = 0;
9256 CameraMetadata staticInfo;
9257 size_t count = 0;
9258 bool limitedDevice = false;
9259 char prop[PROPERTY_VALUE_MAX];
9260 bool supportBurst = false;
9261
9262 supportBurst = supportBurstCapture(cameraId);
9263
 9264 /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
 9265 * guaranteed, or if the min fps of the max resolution is less than 20 fps, the
 9266 * device is advertised as a LIMITED device */
9267 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9268 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9269 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9270 !supportBurst;
9271
9272 uint8_t supportedHwLvl = limitedDevice ?
9273 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009274#ifndef USE_HAL_3_3
9275 // LEVEL_3 - This device will support level 3.
9276 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9277#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009278 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009279#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009280
9281 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9282 &supportedHwLvl, 1);
9283
9284 bool facingBack = false;
9285 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9286 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9287 facingBack = true;
9288 }
9289 /*HAL 3 only*/
9290 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9291 &gCamCapability[cameraId]->min_focus_distance, 1);
9292
9293 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9294 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9295
9296 /*should be using focal lengths but sensor doesn't provide that info now*/
9297 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9298 &gCamCapability[cameraId]->focal_length,
9299 1);
9300
9301 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9302 gCamCapability[cameraId]->apertures,
9303 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9304
9305 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9306 gCamCapability[cameraId]->filter_densities,
9307 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9308
9309
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009310 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9311 size_t mode_count =
9312 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9313 for (size_t i = 0; i < mode_count; i++) {
9314 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9315 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009316 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009317 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009318
9319 int32_t lens_shading_map_size[] = {
9320 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9321 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9322 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9323 lens_shading_map_size,
9324 sizeof(lens_shading_map_size)/sizeof(int32_t));
9325
9326 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9327 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9328
9329 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9330 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9331
9332 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9333 &gCamCapability[cameraId]->max_frame_duration, 1);
9334
9335 camera_metadata_rational baseGainFactor = {
9336 gCamCapability[cameraId]->base_gain_factor.numerator,
9337 gCamCapability[cameraId]->base_gain_factor.denominator};
9338 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9339 &baseGainFactor, 1);
9340
9341 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9342 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9343
9344 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9345 gCamCapability[cameraId]->pixel_array_size.height};
9346 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9347 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9348
9349 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9350 gCamCapability[cameraId]->active_array_size.top,
9351 gCamCapability[cameraId]->active_array_size.width,
9352 gCamCapability[cameraId]->active_array_size.height};
9353 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9354 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9355
9356 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9357 &gCamCapability[cameraId]->white_level, 1);
9358
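    // ANDROID_SENSOR_BLACK_LEVEL_PATTERN lists black level offsets in the order
    // of the reported color filter arrangement, so the capability values are
    // first reordered (presumably from a fixed channel order) to match that CFA.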
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009359 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9360 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9361 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009362 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009363 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009364
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009365#ifndef USE_HAL_3_3
9366 bool hasBlackRegions = false;
9367 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9368 LOGW("black_region_count: %d is bounded to %d",
9369 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9370 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9371 }
9372 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9373 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9374 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9375 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9376 }
9377 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9378 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9379 hasBlackRegions = true;
9380 }
9381#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009382 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9383 &gCamCapability[cameraId]->flash_charge_duration, 1);
9384
9385 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9386 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9387
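    // TIMESTAMP_SOURCE_REALTIME means sensor timestamps share the clock used by
    // SystemClock.elapsedRealtimeNanos(); UNKNOWN only guarantees that the
    // timestamps are monotonic.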
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009388 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9389 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9390 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009391 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9392 &timestampSource, 1);
9393
Thierry Strudel54dc9782017-02-15 12:12:10 -08009394 //update histogram vendor data
9395 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009396 &gCamCapability[cameraId]->histogram_size, 1);
9397
Thierry Strudel54dc9782017-02-15 12:12:10 -08009398 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009399 &gCamCapability[cameraId]->max_histogram_count, 1);
9400
Shuzhen Wang14415f52016-11-16 18:26:18 -08009401 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
 9402 //so that the app can request fewer bins than the maximum supported.
9403 std::vector<int32_t> histBins;
9404 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9405 histBins.push_back(maxHistBins);
9406 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9407 (maxHistBins & 0x1) == 0) {
9408 histBins.push_back(maxHistBins >> 1);
9409 maxHistBins >>= 1;
9410 }
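    // For example, with a hypothetical maximum of 256 bins and a
    // MIN_CAM_HISTOGRAM_STATS_SIZE of 32, this advertises {256, 128, 64, 32}.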
9411 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9412 histBins.data(), histBins.size());
9413
Thierry Strudel3d639192016-09-09 11:52:26 -07009414 int32_t sharpness_map_size[] = {
9415 gCamCapability[cameraId]->sharpness_map_size.width,
9416 gCamCapability[cameraId]->sharpness_map_size.height};
9417
9418 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9419 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9420
9421 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9422 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9423
Emilian Peev0f3c3162017-03-15 12:57:46 +00009424 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9425 if (0 <= indexPD) {
9426 // Advertise PD stats data as part of the Depth capabilities
9427 int32_t depthWidth =
9428 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9429 int32_t depthHeight =
9430 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009431 int32_t depthStride =
9432 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009433 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9434 assert(0 < depthSamplesCount);
9435 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9436 &depthSamplesCount, 1);
9437
9438 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9439 depthHeight,
9440 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9441 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9442 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9443 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9444 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9445
9446 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9447 depthHeight, 33333333,
9448 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9449 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9450 depthMinDuration,
9451 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9452
9453 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9454 depthHeight, 0,
9455 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9456 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9457 depthStallDuration,
9458 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9459
9460 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9461 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009462
9463 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9464 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9465 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009466 }
9467
Thierry Strudel3d639192016-09-09 11:52:26 -07009468 int32_t scalar_formats[] = {
9469 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9470 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9471 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9472 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9473 HAL_PIXEL_FORMAT_RAW10,
9474 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009475 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9476 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9477 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009478
9479 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9480 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9481 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9482 count, MAX_SIZES_CNT, available_processed_sizes);
9483 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9484 available_processed_sizes, count * 2);
9485
9486 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9487 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9488 makeTable(gCamCapability[cameraId]->raw_dim,
9489 count, MAX_SIZES_CNT, available_raw_sizes);
9490 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9491 available_raw_sizes, count * 2);
9492
9493 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9494 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9495 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9496 count, MAX_SIZES_CNT, available_fps_ranges);
9497 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9498 available_fps_ranges, count * 2);
9499
9500 camera_metadata_rational exposureCompensationStep = {
9501 gCamCapability[cameraId]->exp_compensation_step.numerator,
9502 gCamCapability[cameraId]->exp_compensation_step.denominator};
9503 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9504 &exposureCompensationStep, 1);
9505
9506 Vector<uint8_t> availableVstabModes;
9507 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9508 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009509 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009510 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009511 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009512 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009513 count = IS_TYPE_MAX;
9514 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9515 for (size_t i = 0; i < count; i++) {
9516 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9517 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9518 eisSupported = true;
9519 break;
9520 }
9521 }
9522 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009523 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9524 }
9525 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9526 availableVstabModes.array(), availableVstabModes.size());
9527
9528 /*HAL 1 and HAL 3 common*/
9529 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9530 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9531 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009532 // Cap the max zoom to the max preferred value
9533 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009534 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9535 &maxZoom, 1);
9536
9537 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9538 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9539
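    // ANDROID_CONTROL_MAX_REGIONS is ordered (AE, AWB, AF). AWB metering regions
    // are not supported here, and AF regions are disabled below when only a
    // single focus mode (typically a fixed-focus module) is reported.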
9540 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9541 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9542 max3aRegions[2] = 0; /* AF not supported */
9543 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9544 max3aRegions, 3);
9545
9546 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9547 memset(prop, 0, sizeof(prop));
9548 property_get("persist.camera.facedetect", prop, "1");
9549 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9550 LOGD("Support face detection mode: %d",
9551 supportedFaceDetectMode);
9552
9553 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009554 /* supported mode should be OFF if the max number of faces is 0 */
9555 if (maxFaces <= 0) {
9556 supportedFaceDetectMode = 0;
9557 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009558 Vector<uint8_t> availableFaceDetectModes;
9559 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9560 if (supportedFaceDetectMode == 1) {
9561 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9562 } else if (supportedFaceDetectMode == 2) {
9563 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9564 } else if (supportedFaceDetectMode == 3) {
9565 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9566 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9567 } else {
9568 maxFaces = 0;
9569 }
9570 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9571 availableFaceDetectModes.array(),
9572 availableFaceDetectModes.size());
9573 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9574 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009575 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9576 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9577 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009578
9579 int32_t exposureCompensationRange[] = {
9580 gCamCapability[cameraId]->exposure_compensation_min,
9581 gCamCapability[cameraId]->exposure_compensation_max};
9582 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9583 exposureCompensationRange,
9584 sizeof(exposureCompensationRange)/sizeof(int32_t));
9585
9586 uint8_t lensFacing = (facingBack) ?
9587 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9588 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9589
9590 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9591 available_thumbnail_sizes,
9592 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9593
9594 /*all sizes will be clubbed into this tag*/
9595 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9596 /*android.scaler.availableStreamConfigurations*/
9597 Vector<int32_t> available_stream_configs;
9598 cam_dimension_t active_array_dim;
9599 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9600 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009601
 9602 /* Advertise the list of supported input dimensions based on the property below.
 9603 By default all sizes up to 5MP will be advertised.
9604 Note that the setprop resolution format should be WxH.
9605 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9606 To list all supported sizes, setprop needs to be set with "0x0" */
9607 cam_dimension_t minInputSize = {2592,1944}; //5MP
9608 memset(prop, 0, sizeof(prop));
9609 property_get("persist.camera.input.minsize", prop, "2592x1944");
9610 if (strlen(prop) > 0) {
9611 char *saveptr = NULL;
9612 char *token = strtok_r(prop, "x", &saveptr);
9613 if (token != NULL) {
9614 minInputSize.width = atoi(token);
9615 }
9616 token = strtok_r(NULL, "x", &saveptr);
9617 if (token != NULL) {
9618 minInputSize.height = atoi(token);
9619 }
9620 }
9621
Thierry Strudel3d639192016-09-09 11:52:26 -07009622 /* Add input/output stream configurations for each scalar formats*/
9623 for (size_t j = 0; j < scalar_formats_count; j++) {
9624 switch (scalar_formats[j]) {
9625 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9626 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9627 case HAL_PIXEL_FORMAT_RAW10:
9628 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9629 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9630 addStreamConfig(available_stream_configs, scalar_formats[j],
9631 gCamCapability[cameraId]->raw_dim[i],
9632 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9633 }
9634 break;
9635 case HAL_PIXEL_FORMAT_BLOB:
9636 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9637 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9638 addStreamConfig(available_stream_configs, scalar_formats[j],
9639 gCamCapability[cameraId]->picture_sizes_tbl[i],
9640 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9641 }
9642 break;
9643 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9644 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9645 default:
9646 cam_dimension_t largest_picture_size;
9647 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9648 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9649 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9650 addStreamConfig(available_stream_configs, scalar_formats[j],
9651 gCamCapability[cameraId]->picture_sizes_tbl[i],
9652 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009653 /* For these two formats we also support input streams for reprocessing, so advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009654 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9655 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009656 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9657 >= minInputSize.width) || (gCamCapability[cameraId]->
9658 picture_sizes_tbl[i].height >= minInputSize.height)) {
9659 addStreamConfig(available_stream_configs, scalar_formats[j],
9660 gCamCapability[cameraId]->picture_sizes_tbl[i],
9661 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9662 }
9663 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009664 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009665
Thierry Strudel3d639192016-09-09 11:52:26 -07009666 break;
9667 }
9668 }
9669
9670 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9671 available_stream_configs.array(), available_stream_configs.size());
9672 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9673 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9674
9675 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9676 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9677
9678 /* android.scaler.availableMinFrameDurations */
9679 Vector<int64_t> available_min_durations;
9680 for (size_t j = 0; j < scalar_formats_count; j++) {
9681 switch (scalar_formats[j]) {
9682 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9683 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9684 case HAL_PIXEL_FORMAT_RAW10:
9685 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9686 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9687 available_min_durations.add(scalar_formats[j]);
9688 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9689 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9690 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9691 }
9692 break;
9693 default:
9694 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9695 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9696 available_min_durations.add(scalar_formats[j]);
9697 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9698 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9699 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9700 }
9701 break;
9702 }
9703 }
9704 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9705 available_min_durations.array(), available_min_durations.size());
9706
9707 Vector<int32_t> available_hfr_configs;
9708 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9709 int32_t fps = 0;
9710 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9711 case CAM_HFR_MODE_60FPS:
9712 fps = 60;
9713 break;
9714 case CAM_HFR_MODE_90FPS:
9715 fps = 90;
9716 break;
9717 case CAM_HFR_MODE_120FPS:
9718 fps = 120;
9719 break;
9720 case CAM_HFR_MODE_150FPS:
9721 fps = 150;
9722 break;
9723 case CAM_HFR_MODE_180FPS:
9724 fps = 180;
9725 break;
9726 case CAM_HFR_MODE_210FPS:
9727 fps = 210;
9728 break;
9729 case CAM_HFR_MODE_240FPS:
9730 fps = 240;
9731 break;
9732 case CAM_HFR_MODE_480FPS:
9733 fps = 480;
9734 break;
9735 case CAM_HFR_MODE_OFF:
9736 case CAM_HFR_MODE_MAX:
9737 default:
9738 break;
9739 }
9740
9741 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9742 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9743 /* For each HFR frame rate, need to advertise one variable fps range
9744 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9745 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9746 * set by the app. When video recording is started, [120, 120] is
9747 * set. This way sensor configuration does not change when recording
9748 * is started */
9749
9750 /* (width, height, fps_min, fps_max, batch_size_max) */
9751 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9752 j < MAX_SIZES_CNT; j++) {
9753 available_hfr_configs.add(
9754 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9755 available_hfr_configs.add(
9756 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9757 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9758 available_hfr_configs.add(fps);
9759 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9760
9761 /* (width, height, fps_min, fps_max, batch_size_max) */
9762 available_hfr_configs.add(
9763 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9764 available_hfr_configs.add(
9765 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9766 available_hfr_configs.add(fps);
9767 available_hfr_configs.add(fps);
9768 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9769 }
9770 }
9771 }
9772 //Advertise HFR capability only if the property is set
9773 memset(prop, 0, sizeof(prop));
9774 property_get("persist.camera.hal3hfr.enable", prop, "1");
9775 uint8_t hfrEnable = (uint8_t)atoi(prop);
9776
9777 if(hfrEnable && available_hfr_configs.array()) {
9778 staticInfo.update(
9779 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9780 available_hfr_configs.array(), available_hfr_configs.size());
9781 }
9782
9783 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9784 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9785 &max_jpeg_size, 1);
9786
9787 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9788 size_t size = 0;
9789 count = CAM_EFFECT_MODE_MAX;
9790 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9791 for (size_t i = 0; i < count; i++) {
9792 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9793 gCamCapability[cameraId]->supported_effects[i]);
9794 if (NAME_NOT_FOUND != val) {
9795 avail_effects[size] = (uint8_t)val;
9796 size++;
9797 }
9798 }
9799 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9800 avail_effects,
9801 size);
9802
9803 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9804 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9805 size_t supported_scene_modes_cnt = 0;
9806 count = CAM_SCENE_MODE_MAX;
9807 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9808 for (size_t i = 0; i < count; i++) {
9809 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9810 CAM_SCENE_MODE_OFF) {
9811 int val = lookupFwkName(SCENE_MODES_MAP,
9812 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9813 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009814
Thierry Strudel3d639192016-09-09 11:52:26 -07009815 if (NAME_NOT_FOUND != val) {
9816 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9817 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9818 supported_scene_modes_cnt++;
9819 }
9820 }
9821 }
9822 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9823 avail_scene_modes,
9824 supported_scene_modes_cnt);
9825
9826 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9827 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9828 supported_scene_modes_cnt,
9829 CAM_SCENE_MODE_MAX,
9830 scene_mode_overrides,
9831 supported_indexes,
9832 cameraId);
9833
9834 if (supported_scene_modes_cnt == 0) {
9835 supported_scene_modes_cnt = 1;
9836 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9837 }
9838
9839 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9840 scene_mode_overrides, supported_scene_modes_cnt * 3);
9841
9842 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9843 ANDROID_CONTROL_MODE_AUTO,
9844 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9845 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9846 available_control_modes,
9847 3);
9848
9849 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9850 size = 0;
9851 count = CAM_ANTIBANDING_MODE_MAX;
9852 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9853 for (size_t i = 0; i < count; i++) {
9854 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9855 gCamCapability[cameraId]->supported_antibandings[i]);
9856 if (NAME_NOT_FOUND != val) {
9857 avail_antibanding_modes[size] = (uint8_t)val;
9858 size++;
9859 }
9860
9861 }
9862 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9863 avail_antibanding_modes,
9864 size);
9865
9866 uint8_t avail_abberation_modes[] = {
9867 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9868 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9869 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9870 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9871 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9872 if (0 == count) {
 9873 // If no aberration correction modes are available for the device, advertise only the OFF mode
9874 size = 1;
9875 } else {
 9876 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
 9877 // so advertise all 3 modes if at least one mode is supported, as per the
 9878 // new Android M requirement
9879 size = 3;
9880 }
9881 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9882 avail_abberation_modes,
9883 size);
9884
9885 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9886 size = 0;
9887 count = CAM_FOCUS_MODE_MAX;
9888 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9889 for (size_t i = 0; i < count; i++) {
9890 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9891 gCamCapability[cameraId]->supported_focus_modes[i]);
9892 if (NAME_NOT_FOUND != val) {
9893 avail_af_modes[size] = (uint8_t)val;
9894 size++;
9895 }
9896 }
9897 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9898 avail_af_modes,
9899 size);
9900
9901 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9902 size = 0;
9903 count = CAM_WB_MODE_MAX;
9904 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9905 for (size_t i = 0; i < count; i++) {
9906 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9907 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9908 gCamCapability[cameraId]->supported_white_balances[i]);
9909 if (NAME_NOT_FOUND != val) {
9910 avail_awb_modes[size] = (uint8_t)val;
9911 size++;
9912 }
9913 }
9914 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9915 avail_awb_modes,
9916 size);
9917
9918 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9919 count = CAM_FLASH_FIRING_LEVEL_MAX;
9920 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9921 count);
9922 for (size_t i = 0; i < count; i++) {
9923 available_flash_levels[i] =
9924 gCamCapability[cameraId]->supported_firing_levels[i];
9925 }
9926 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9927 available_flash_levels, count);
9928
9929 uint8_t flashAvailable;
9930 if (gCamCapability[cameraId]->flash_available)
9931 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9932 else
9933 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9934 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9935 &flashAvailable, 1);
9936
9937 Vector<uint8_t> avail_ae_modes;
9938 count = CAM_AE_MODE_MAX;
9939 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9940 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009941 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
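        // CAM_AE_MODE_ON_EXTERNAL_FLASH has no standard ANDROID_CONTROL_AE_MODE_*
        // value, so it is reported through the experimental vendor enum instead.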
9942 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9943 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9944 }
9945 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009946 }
9947 if (flashAvailable) {
9948 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9949 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9950 }
9951 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9952 avail_ae_modes.array(),
9953 avail_ae_modes.size());
9954
9955 int32_t sensitivity_range[2];
9956 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9957 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9958 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9959 sensitivity_range,
9960 sizeof(sensitivity_range) / sizeof(int32_t));
9961
9962 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9963 &gCamCapability[cameraId]->max_analog_sensitivity,
9964 1);
9965
9966 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9967 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9968 &sensor_orientation,
9969 1);
9970
9971 int32_t max_output_streams[] = {
9972 MAX_STALLING_STREAMS,
9973 MAX_PROCESSED_STREAMS,
9974 MAX_RAW_STREAMS};
9975 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9976 max_output_streams,
9977 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9978
9979 uint8_t avail_leds = 0;
9980 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9981 &avail_leds, 0);
9982
9983 uint8_t focus_dist_calibrated;
9984 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9985 gCamCapability[cameraId]->focus_dist_calibrated);
9986 if (NAME_NOT_FOUND != val) {
9987 focus_dist_calibrated = (uint8_t)val;
9988 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9989 &focus_dist_calibrated, 1);
9990 }
9991
9992 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9993 size = 0;
9994 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9995 MAX_TEST_PATTERN_CNT);
9996 for (size_t i = 0; i < count; i++) {
9997 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9998 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9999 if (NAME_NOT_FOUND != testpatternMode) {
10000 avail_testpattern_modes[size] = testpatternMode;
10001 size++;
10002 }
10003 }
10004 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10005 avail_testpattern_modes,
10006 size);
10007
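    // Reported pipeline depth: the maximum number of in-flight requests plus the
    // fixed pipeline and frame-skip latencies defined for this HAL.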
10008 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10009 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10010 &max_pipeline_depth,
10011 1);
10012
10013 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10014 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10015 &partial_result_count,
10016 1);
10017
10018 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10019 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10020
10021 Vector<uint8_t> available_capabilities;
10022 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10023 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10024 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10025 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10026 if (supportBurst) {
10027 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10028 }
10029 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10030 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10031 if (hfrEnable && available_hfr_configs.array()) {
10032 available_capabilities.add(
10033 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10034 }
10035
10036 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10037 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10038 }
10039 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10040 available_capabilities.array(),
10041 available_capabilities.size());
10042
 10043 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10044 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10045 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10046 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10047
10048 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10049 &aeLockAvailable, 1);
10050
 10051 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10052 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10053 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10054 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10055
10056 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10057 &awbLockAvailable, 1);
10058
10059 int32_t max_input_streams = 1;
10060 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10061 &max_input_streams,
10062 1);
10063
10064 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10065 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10066 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10067 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10068 HAL_PIXEL_FORMAT_YCbCr_420_888};
10069 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10070 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10071
10072 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10073 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10074 &max_latency,
10075 1);
10076
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010077#ifndef USE_HAL_3_3
10078 int32_t isp_sensitivity_range[2];
10079 isp_sensitivity_range[0] =
10080 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10081 isp_sensitivity_range[1] =
10082 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10083 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10084 isp_sensitivity_range,
10085 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10086#endif
10087
Thierry Strudel3d639192016-09-09 11:52:26 -070010088 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10089 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10090 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10091 available_hot_pixel_modes,
10092 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10093
10094 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10095 ANDROID_SHADING_MODE_FAST,
10096 ANDROID_SHADING_MODE_HIGH_QUALITY};
10097 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10098 available_shading_modes,
10099 3);
10100
10101 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10102 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10103 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10104 available_lens_shading_map_modes,
10105 2);
10106
10107 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10108 ANDROID_EDGE_MODE_FAST,
10109 ANDROID_EDGE_MODE_HIGH_QUALITY,
10110 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10111 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10112 available_edge_modes,
10113 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10114
10115 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10116 ANDROID_NOISE_REDUCTION_MODE_FAST,
10117 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10118 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10119 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10120 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10121 available_noise_red_modes,
10122 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10123
10124 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10125 ANDROID_TONEMAP_MODE_FAST,
10126 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10127 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10128 available_tonemap_modes,
10129 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10130
10131 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10132 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10133 available_hot_pixel_map_modes,
10134 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10135
10136 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10137 gCamCapability[cameraId]->reference_illuminant1);
10138 if (NAME_NOT_FOUND != val) {
10139 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10140 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10141 }
10142
10143 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10144 gCamCapability[cameraId]->reference_illuminant2);
10145 if (NAME_NOT_FOUND != val) {
10146 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10147 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10148 }
10149
10150 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10151 (void *)gCamCapability[cameraId]->forward_matrix1,
10152 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10153
10154 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10155 (void *)gCamCapability[cameraId]->forward_matrix2,
10156 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10157
10158 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10159 (void *)gCamCapability[cameraId]->color_transform1,
10160 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10161
10162 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10163 (void *)gCamCapability[cameraId]->color_transform2,
10164 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10165
10166 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10167 (void *)gCamCapability[cameraId]->calibration_transform1,
10168 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10169
10170 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10171 (void *)gCamCapability[cameraId]->calibration_transform2,
10172 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10173
10174 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10175 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10176 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10177 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10178 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10179 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10180 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10181 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10182 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10183 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10184 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10185 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10186 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10187 ANDROID_JPEG_GPS_COORDINATES,
10188 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10189 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10190 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10191 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10192 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10193 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10194 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10195 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10196 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10197 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010198#ifndef USE_HAL_3_3
10199 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10200#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010201 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010202 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010203 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10204 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010205 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010206 /* DevCamDebug metadata request_keys_basic */
10207 DEVCAMDEBUG_META_ENABLE,
10208 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010209 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010210 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010211 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010212 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010213 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010214 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010215
10216 size_t request_keys_cnt =
10217 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10218 Vector<int32_t> available_request_keys;
10219 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10220 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10221 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10222 }
10223
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010224 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010225 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010226 }
10227
Thierry Strudel3d639192016-09-09 11:52:26 -070010228 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10229 available_request_keys.array(), available_request_keys.size());
10230
10231 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10232 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10233 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10234 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10235 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10236 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10237 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10238 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10239 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10240 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10241 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10242 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10243 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10244 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10245 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10246 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10247 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010248 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010249 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10250 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10251 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010252 ANDROID_STATISTICS_FACE_SCORES,
10253#ifndef USE_HAL_3_3
10254 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10255#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010256 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010257 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010258 // DevCamDebug metadata result_keys_basic
10259 DEVCAMDEBUG_META_ENABLE,
10260 // DevCamDebug metadata result_keys AF
10261 DEVCAMDEBUG_AF_LENS_POSITION,
10262 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10263 DEVCAMDEBUG_AF_TOF_DISTANCE,
10264 DEVCAMDEBUG_AF_LUMA,
10265 DEVCAMDEBUG_AF_HAF_STATE,
10266 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10267 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10268 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10269 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10270 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10271 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10272 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10273 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10274 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10275 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10276 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10277 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10278 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10279 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10280 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10281 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10282 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10283 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10284 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10285 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10286 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10287 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10288 // DevCamDebug metadata result_keys AEC
10289 DEVCAMDEBUG_AEC_TARGET_LUMA,
10290 DEVCAMDEBUG_AEC_COMP_LUMA,
10291 DEVCAMDEBUG_AEC_AVG_LUMA,
10292 DEVCAMDEBUG_AEC_CUR_LUMA,
10293 DEVCAMDEBUG_AEC_LINECOUNT,
10294 DEVCAMDEBUG_AEC_REAL_GAIN,
10295 DEVCAMDEBUG_AEC_EXP_INDEX,
10296 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010297 // DevCamDebug metadata result_keys zzHDR
10298 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10299 DEVCAMDEBUG_AEC_L_LINECOUNT,
10300 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10301 DEVCAMDEBUG_AEC_S_LINECOUNT,
10302 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10303 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10304 // DevCamDebug metadata result_keys ADRC
10305 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10306 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10307 DEVCAMDEBUG_AEC_GTM_RATIO,
10308 DEVCAMDEBUG_AEC_LTM_RATIO,
10309 DEVCAMDEBUG_AEC_LA_RATIO,
10310 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010311 // DevCamDebug metadata result_keys AEC MOTION
10312 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10313 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10314 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010315 // DevCamDebug metadata result_keys AWB
10316 DEVCAMDEBUG_AWB_R_GAIN,
10317 DEVCAMDEBUG_AWB_G_GAIN,
10318 DEVCAMDEBUG_AWB_B_GAIN,
10319 DEVCAMDEBUG_AWB_CCT,
10320 DEVCAMDEBUG_AWB_DECISION,
10321 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010322 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10323 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10324 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010325 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010326 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010327 };
10328
Thierry Strudel3d639192016-09-09 11:52:26 -070010329 size_t result_keys_cnt =
10330 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10331
10332 Vector<int32_t> available_result_keys;
10333 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10334 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10335 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10336 }
10337 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10338 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10339 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10340 }
10341 if (supportedFaceDetectMode == 1) {
10342 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10343 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10344 } else if ((supportedFaceDetectMode == 2) ||
10345 (supportedFaceDetectMode == 3)) {
10346 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10347 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10348 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010349#ifndef USE_HAL_3_3
10350 if (hasBlackRegions) {
10351 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10352 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10353 }
10354#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010355
10356 if (gExposeEnableZslKey) {
10357 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10358 }
10359
Thierry Strudel3d639192016-09-09 11:52:26 -070010360 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10361 available_result_keys.array(), available_result_keys.size());
10362
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010363 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010364 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10365 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10366 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10367 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10368 ANDROID_SCALER_CROPPING_TYPE,
10369 ANDROID_SYNC_MAX_LATENCY,
10370 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10371 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10372 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10373 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10374 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10375 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10376 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10377 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10378 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10379 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10380 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10381 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10382 ANDROID_LENS_FACING,
10383 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10384 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10385 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10386 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10387 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10388 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10389 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10390 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10391 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10392 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10393 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10394 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10395 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10396 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10397 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10398 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10399 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10400 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10401 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10402 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010403 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010404 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10405 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10406 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10407 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10408 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10409 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10410 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10411 ANDROID_CONTROL_AVAILABLE_MODES,
10412 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10413 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10414 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10415 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010416 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10417#ifndef USE_HAL_3_3
10418 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10419 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10420#endif
10421 };
10422
10423 Vector<int32_t> available_characteristics_keys;
10424 available_characteristics_keys.appendArray(characteristics_keys_basic,
10425 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10426#ifndef USE_HAL_3_3
10427 if (hasBlackRegions) {
10428 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10429 }
10430#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010431
10432 if (0 <= indexPD) {
10433 int32_t depthKeys[] = {
10434 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10435 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10436 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10437 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10438 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10439 };
10440 available_characteristics_keys.appendArray(depthKeys,
10441 sizeof(depthKeys) / sizeof(depthKeys[0]));
10442 }
10443
Thierry Strudel3d639192016-09-09 11:52:26 -070010444 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010445 available_characteristics_keys.array(),
10446 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010447
10448 /* Available stall durations depend on the HW + SW pipeline and will differ across devices. */
10449 /* RAW stall durations still need to be added once that path is implemented. */
10450 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10451 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10452
10453 Vector<int64_t> available_stall_durations;
10454 for (uint32_t j = 0; j < stall_formats_count; j++) {
10455 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10456 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10457 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10458 available_stall_durations.add(stall_formats[j]);
10459 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10460 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10461 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10462 }
10463 } else {
10464 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10465 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10466 available_stall_durations.add(stall_formats[j]);
10467 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10468 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10469 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10470 }
10471 }
10472 }
10473 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10474 available_stall_durations.array(),
10475 available_stall_durations.size());
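// Editorial note (sketch, not part of the original HAL logic): each entry appended above is a
// (format, width, height, stall duration in ns) quadruple, which is the layout the framework
// expects for ANDROID_SCALER_AVAILABLE_STALL_DURATIONS. With a hypothetical 4032x3024 JPEG size
// and an assumed 300 ms JPEG stall, one entry would look like:
//
//     int64_t example_entry[] = {
//         HAL_PIXEL_FORMAT_BLOB,   // format
//         4032, 3024,              // width, height
//         300000000LL              // stall duration in ns (illustrative value only)
//     };
//
// Real values come from picture_sizes_tbl / jpeg_stall_durations (and raw_dim /
// raw16_stall_durations) reported by the capability query.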
10476
10477 //QCAMERA3_OPAQUE_RAW
10478 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10479 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10480 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10481 case LEGACY_RAW:
10482 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10483 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10484 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10485 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10486 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10487 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10488 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10489 break;
10490 case MIPI_RAW:
10491 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10492 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10493 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10494 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10495 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10496 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10497 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10498 break;
10499 default:
10500 LOGE("unknown opaque_raw_format %d",
10501 gCamCapability[cameraId]->opaque_raw_fmt);
10502 break;
10503 }
10504 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10505
10506 Vector<int32_t> strides;
10507 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10508 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10509 cam_stream_buf_plane_info_t buf_planes;
10510 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10511 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10512 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10513 &gCamCapability[cameraId]->padding_info, &buf_planes);
10514 strides.add(buf_planes.plane_info.mp[0].stride);
10515 }
10516 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10517 strides.size());
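// Editorial note (sketch): QCAMERA3_OPAQUE_RAW_STRIDES is likewise a flat list of
// (width, height, stride) triples, one per supported raw dimension, with the stride taken from
// the first plane computed by mm_stream_calc_offset_raw(). A hypothetical 10-bit packed entry
// could look like:
//
//     int32_t example_stride_entry[] = {4032, 3024, 5040};  // width, height, stride (illustrative)
//
// Consumers parsing this vendor tag should read it three values at a time.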
10518
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010519 //TBD: remove the following line once backend advertises zzHDR in feature mask
10520 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010521 //Video HDR default
10522 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10523 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010524 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010525 int32_t vhdr_mode[] = {
10526 QCAMERA3_VIDEO_HDR_MODE_OFF,
10527 QCAMERA3_VIDEO_HDR_MODE_ON};
10528
10529 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10530 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10531 vhdr_mode, vhdr_mode_count);
10532 }
10533
Thierry Strudel3d639192016-09-09 11:52:26 -070010534 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10535 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10536 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10537
10538 uint8_t isMonoOnly =
10539 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10540 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10541 &isMonoOnly, 1);
10542
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010543#ifndef USE_HAL_3_3
10544 Vector<int32_t> opaque_size;
10545 for (size_t j = 0; j < scalar_formats_count; j++) {
10546 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10547 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10548 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10549 cam_stream_buf_plane_info_t buf_planes;
10550
10551 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10552 &gCamCapability[cameraId]->padding_info, &buf_planes);
10553
10554 if (rc == 0) {
10555 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10556 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10557 opaque_size.add(buf_planes.plane_info.frame_len);
10558 } else {
10559 LOGE("raw frame calculation failed!");
10560 }
10561 }
10562 }
10563 }
10564
10565 if ((opaque_size.size() > 0) &&
10566 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10567 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10568 else
10569 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10570#endif
10571
Thierry Strudel04e026f2016-10-10 11:27:36 -070010572 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10573 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10574 size = 0;
10575 count = CAM_IR_MODE_MAX;
10576 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10577 for (size_t i = 0; i < count; i++) {
10578 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10579 gCamCapability[cameraId]->supported_ir_modes[i]);
10580 if (NAME_NOT_FOUND != val) {
10581 avail_ir_modes[size] = (int32_t)val;
10582 size++;
10583 }
10584 }
10585 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10586 avail_ir_modes, size);
10587 }
10588
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010589 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10590 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10591 size = 0;
10592 count = CAM_AEC_CONVERGENCE_MAX;
10593 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10594 for (size_t i = 0; i < count; i++) {
10595 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10596 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10597 if (NAME_NOT_FOUND != val) {
10598 available_instant_aec_modes[size] = (int32_t)val;
10599 size++;
10600 }
10601 }
10602 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10603 available_instant_aec_modes, size);
10604 }
10605
Thierry Strudel54dc9782017-02-15 12:12:10 -080010606 int32_t sharpness_range[] = {
10607 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10608 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10609 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10610
10611 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10612 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10613 size = 0;
10614 count = CAM_BINNING_CORRECTION_MODE_MAX;
10615 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10616 for (size_t i = 0; i < count; i++) {
10617 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10618 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10619 gCamCapability[cameraId]->supported_binning_modes[i]);
10620 if (NAME_NOT_FOUND != val) {
10621 avail_binning_modes[size] = (int32_t)val;
10622 size++;
10623 }
10624 }
10625 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10626 avail_binning_modes, size);
10627 }
10628
10629 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10630 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10631 size = 0;
10632 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10633 for (size_t i = 0; i < count; i++) {
10634 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10635 gCamCapability[cameraId]->supported_aec_modes[i]);
10636 if (NAME_NOT_FOUND != val)
10637 available_aec_modes[size++] = val;
10638 }
10639 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10640 available_aec_modes, size);
10641 }
10642
10643 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10644 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10645 size = 0;
10646 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10647 for (size_t i = 0; i < count; i++) {
10648 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10649 gCamCapability[cameraId]->supported_iso_modes[i]);
10650 if (NAME_NOT_FOUND != val)
10651 available_iso_modes[size++] = val;
10652 }
10653 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10654 available_iso_modes, size);
10655 }
10656
10657 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010658 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010659 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10660 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10661 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10662
10663 int32_t available_saturation_range[4];
10664 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10665 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10666 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10667 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10668 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10669 available_saturation_range, 4);
10670
10671 uint8_t is_hdr_values[2];
10672 is_hdr_values[0] = 0;
10673 is_hdr_values[1] = 1;
10674 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10675 is_hdr_values, 2);
10676
10677 float is_hdr_confidence_range[2];
10678 is_hdr_confidence_range[0] = 0.0;
10679 is_hdr_confidence_range[1] = 1.0;
10680 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10681 is_hdr_confidence_range, 2);
10682
Emilian Peev0a972ef2017-03-16 10:25:53 +000010683 size_t eepromLength = strnlen(
10684 reinterpret_cast<const char *>(
10685 gCamCapability[cameraId]->eeprom_version_info),
10686 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10687 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010688 char easelInfo[] = ",E:N";
10689 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10690 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10691 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010692 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10693 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010694 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010695 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010696 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10697 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10698 }
10699
Thierry Strudel3d639192016-09-09 11:52:26 -070010700 gStaticMetadata[cameraId] = staticInfo.release();
10701 return rc;
10702}
10703
10704/*===========================================================================
10705 * FUNCTION : makeTable
10706 *
10707 * DESCRIPTION: flatten a table of dimensions into an interleaved width/height array
10708 *
10709 * PARAMETERS :
10710 *   @dimTable / @size : source dimension table and its entry count
10711 *   @max_size / @sizeTable : copy limit and output array of width/height pairs
10712 *==========================================================================*/
10713void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10714 size_t max_size, int32_t *sizeTable)
10715{
10716 size_t j = 0;
10717 if (size > max_size) {
10718 size = max_size;
10719 }
10720 for (size_t i = 0; i < size; i++) {
10721 sizeTable[j] = dimTable[i].width;
10722 sizeTable[j+1] = dimTable[i].height;
10723 j+=2;
10724 }
10725}
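/* Editorial sketch: makeTable() (and makeFPSTable() below) flatten structured capability tables
 * into the interleaved int32_t arrays that camera_metadata expects. Assuming a hypothetical
 * dimTable of {{1920,1080},{1280,720}}:
 *
 *     int32_t sizeTable[4];
 *     makeTable(dimTable, 2, 2, sizeTable);
 *     // sizeTable == {1920, 1080, 1280, 720}
 *
 * i.e. width/height pairs in the same order as the source table, truncated at max_size.
 */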
10726
10727/*===========================================================================
10728 * FUNCTION : makeFPSTable
10729 *
10730 * DESCRIPTION: flatten a table of fps ranges into an interleaved min/max fps array
10731 *
10732 * PARAMETERS :
10733 *   @fpsTable / @size / @max_size / @fpsRangesTable : source fps range table, its entry count, the copy limit, and the output min/max array
10734 *==========================================================================*/
10735void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10736 size_t max_size, int32_t *fpsRangesTable)
10737{
10738 size_t j = 0;
10739 if (size > max_size) {
10740 size = max_size;
10741 }
10742 for (size_t i = 0; i < size; i++) {
10743 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10744 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10745 j+=2;
10746 }
10747}
10748
10749/*===========================================================================
10750 * FUNCTION : makeOverridesList
10751 *
10752 * DESCRIPTION: make a list of scene mode overrides
10753 *
10754 * PARAMETERS :
10755 *   @overridesTable / @size / @max_size : backend override table, its entry count, and the copy limit
10756 *   @overridesList / @supported_indexes / @camera_id : output (AE, AWB, AF) triples, indexes of fwk-supported scene modes, and the camera id
10757 *==========================================================================*/
10758void QCamera3HardwareInterface::makeOverridesList(
10759 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10760 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10761{
10762 /* The daemon gives a list of overrides for all scene modes.
10763 However, we should send the framework only the overrides for the
10764 scene modes that the framework actually supports. */
10765 size_t j = 0;
10766 if (size > max_size) {
10767 size = max_size;
10768 }
10769 size_t focus_count = CAM_FOCUS_MODE_MAX;
10770 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10771 focus_count);
10772 for (size_t i = 0; i < size; i++) {
10773 bool supt = false;
10774 size_t index = supported_indexes[i];
10775 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10776 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10777 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10778 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10779 overridesTable[index].awb_mode);
10780 if (NAME_NOT_FOUND != val) {
10781 overridesList[j+1] = (uint8_t)val;
10782 }
10783 uint8_t focus_override = overridesTable[index].af_mode;
10784 for (size_t k = 0; k < focus_count; k++) {
10785 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10786 supt = true;
10787 break;
10788 }
10789 }
10790 if (supt) {
10791 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10792 focus_override);
10793 if (NAME_NOT_FOUND != val) {
10794 overridesList[j+2] = (uint8_t)val;
10795 }
10796 } else {
10797 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10798 }
10799 j+=3;
10800 }
10801}
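/* Editorial sketch: the resulting overridesList is what gets published as
 * ANDROID_CONTROL_SCENE_MODE_OVERRIDES, i.e. three bytes per supported scene mode:
 *
 *     // for supported scene mode i (layout per the loop above):
 *     // overridesList[3*i + 0] = AE mode  (ON or ON_AUTO_FLASH, depending on flash_available)
 *     // overridesList[3*i + 1] = AWB mode (translated via WHITE_BALANCE_MODES_MAP)
 *     // overridesList[3*i + 2] = AF mode  (translated via FOCUS_MODES_MAP, or OFF if unsupported)
 */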
10802
10803/*===========================================================================
10804 * FUNCTION : filterJpegSizes
10805 *
10806 * DESCRIPTION: Returns the supported JPEG sizes: the processed sizes that remain at
10807 * least as large as the active array dimensions divided by the maximum downscale factor
10808 *
10809 * PARAMETERS :
10810 *   @jpegSizes (out), @processedSizes / @processedSizesCnt (in), @maxCount, @active_array_size, @downscale_factor
10811 * RETURN : length of jpegSizes array
10812 *==========================================================================*/
10813
10814size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10815 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10816 uint8_t downscale_factor)
10817{
10818 if (0 == downscale_factor) {
10819 downscale_factor = 1;
10820 }
10821
10822 int32_t min_width = active_array_size.width / downscale_factor;
10823 int32_t min_height = active_array_size.height / downscale_factor;
10824 size_t jpegSizesCnt = 0;
10825 if (processedSizesCnt > maxCount) {
10826 processedSizesCnt = maxCount;
10827 }
10828 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10829 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10830 jpegSizes[jpegSizesCnt] = processedSizes[i];
10831 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10832 jpegSizesCnt += 2;
10833 }
10834 }
10835 return jpegSizesCnt;
10836}
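/* Editorial sketch: with a hypothetical 4000x3000 active array and downscale_factor = 4,
 * min_width/min_height become 1000/750, so a processedSizes list of
 * {4000,3000, 1920,1080, 640,480} is filtered down to {4000,3000, 1920,1080}
 * (640x480 is smaller than 1000x750 and is dropped). The function returns the number of
 * int32_t values written, 4 in this example.
 */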
10837
10838/*===========================================================================
10839 * FUNCTION : computeNoiseModelEntryS
10840 *
10841 * DESCRIPTION: function to map a given sensitivity to the S noise
10842 * model parameters in the DNG noise model.
10843 *
10844 * PARAMETERS : sens : the sensor sensitivity
10845 *
10846 * RETURN : S (sensor amplification) noise
10847 *
10848 *==========================================================================*/
10849double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10850 double s = gCamCapability[mCameraId]->gradient_S * sens +
10851 gCamCapability[mCameraId]->offset_S;
10852 return ((s < 0.0) ? 0.0 : s);
10853}
10854
10855/*===========================================================================
10856 * FUNCTION : computeNoiseModelEntryO
10857 *
10858 * DESCRIPTION: function to map a given sensitivity to the O noise
10859 * model parameters in the DNG noise model.
10860 *
10861 * PARAMETERS : sens : the sensor sensitivity
10862 *
10863 * RETURN : O (sensor readout) noise
10864 *
10865 *==========================================================================*/
10866double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10867 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10868 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10869 1.0 : (1.0 * sens / max_analog_sens);
10870 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10871 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10872 return ((o < 0.0) ? 0.0 : o);
10873}
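/* Editorial note: in the DNG/Android noise model the per-channel noise is commonly modeled as
 * sqrt(S * x + O), where x is the normalized pixel value, S tracks the sensor amplification and
 * O the readout noise (hence the sens^2 and digital_gain^2 terms above). A hedged sketch of how
 * a per-frame ANDROID_SENSOR_NOISE_PROFILE entry could be assembled from these helpers
 * (illustrative only, not necessarily the HAL's actual result path):
 *
 *     double s = computeNoiseModelEntryS(sensitivity);
 *     double o = computeNoiseModelEntryO(sensitivity);
 *     double noise_profile[2 * 4];          // one (S, O) pair per CFA channel
 *     for (int ch = 0; ch < 4; ch++) {
 *         noise_profile[2 * ch]     = s;
 *         noise_profile[2 * ch + 1] = o;
 *     }
 */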
10874
10875/*===========================================================================
10876 * FUNCTION : getSensorSensitivity
10877 *
10878 * DESCRIPTION: convert iso_mode to an integer value
10879 *
10880 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10881 *
10882 * RETURN : sensitivity supported by sensor
10883 *
10884 *==========================================================================*/
10885int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10886{
10887 int32_t sensitivity;
10888
10889 switch (iso_mode) {
10890 case CAM_ISO_MODE_100:
10891 sensitivity = 100;
10892 break;
10893 case CAM_ISO_MODE_200:
10894 sensitivity = 200;
10895 break;
10896 case CAM_ISO_MODE_400:
10897 sensitivity = 400;
10898 break;
10899 case CAM_ISO_MODE_800:
10900 sensitivity = 800;
10901 break;
10902 case CAM_ISO_MODE_1600:
10903 sensitivity = 1600;
10904 break;
10905 default:
10906 sensitivity = -1;
10907 break;
10908 }
10909 return sensitivity;
10910}
10911
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010912int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010913 if (gEaselManagerClient == nullptr) {
10914 gEaselManagerClient = EaselManagerClient::create();
10915 if (gEaselManagerClient == nullptr) {
10916 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10917 return -ENODEV;
10918 }
10919 }
10920
10921 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010922 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10923 // to connect to Easel.
10924 bool doNotpowerOnEasel =
10925 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10926
10927 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010928 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10929 return OK;
10930 }
10931
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010932 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010933 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010934 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010935 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010936 return res;
10937 }
10938
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010939 EaselManagerClientOpened = true;
10940
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010941 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010942 if (res != OK) {
10943 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10944 }
10945
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010946 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010947 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010948
10949 // Expose enableZsl key only when HDR+ mode is enabled.
10950 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010951 }
10952
10953 return OK;
10954}
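// Editorial note: Easel/HDR+ behavior here is gated entirely by the system properties read
// above. A hedged example of how they are typically toggled on a debug build (the property
// names are the ones this function reads; the workflow itself is an assumption):
//
//     adb shell setprop persist.camera.hdrplus.enable 1      # run HDR+ on Easel instead of bypass
//     adb shell setprop persist.camera.hdrplus.profiling 1   # enable HDR+ profiling
//     adb shell setprop camera.hdrplus.donotpoweroneasel 1   # leave Easel off so external HDR+ tests can connect
//
// Note that ANDROID_CONTROL_ENABLE_ZSL is exposed to the framework only when HDR+ is enabled
// (gExposeEnableZslKey = !gEaselBypassOnly).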
10955
Thierry Strudel3d639192016-09-09 11:52:26 -070010956/*===========================================================================
10957 * FUNCTION : getCamInfo
10958 *
10959 * DESCRIPTION: query camera capabilities
10960 *
10961 * PARAMETERS :
10962 * @cameraId : camera Id
10963 * @info : camera info struct to be filled in with camera capabilities
10964 *
10965 * RETURN : int type of status
10966 * NO_ERROR -- success
10967 * none-zero failure code
10968 *==========================================================================*/
10969int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10970 struct camera_info *info)
10971{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010972 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010973 int rc = 0;
10974
10975 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010976
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010977 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010978 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010979 rc = initHdrPlusClientLocked();
10980 if (rc != OK) {
10981 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10982 pthread_mutex_unlock(&gCamLock);
10983 return rc;
10984 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010985 }
10986
Thierry Strudel3d639192016-09-09 11:52:26 -070010987 if (NULL == gCamCapability[cameraId]) {
10988 rc = initCapabilities(cameraId);
10989 if (rc < 0) {
10990 pthread_mutex_unlock(&gCamLock);
10991 return rc;
10992 }
10993 }
10994
10995 if (NULL == gStaticMetadata[cameraId]) {
10996 rc = initStaticMetadata(cameraId);
10997 if (rc < 0) {
10998 pthread_mutex_unlock(&gCamLock);
10999 return rc;
11000 }
11001 }
11002
11003 switch(gCamCapability[cameraId]->position) {
11004 case CAM_POSITION_BACK:
11005 case CAM_POSITION_BACK_AUX:
11006 info->facing = CAMERA_FACING_BACK;
11007 break;
11008
11009 case CAM_POSITION_FRONT:
11010 case CAM_POSITION_FRONT_AUX:
11011 info->facing = CAMERA_FACING_FRONT;
11012 break;
11013
11014 default:
11015 LOGE("Unknown position type %d for camera id:%d",
11016 gCamCapability[cameraId]->position, cameraId);
11017 rc = -1;
11018 break;
11019 }
11020
11021
11022 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011023#ifndef USE_HAL_3_3
11024 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11025#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011026 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011027#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011028 info->static_camera_characteristics = gStaticMetadata[cameraId];
11029
11030 //For now assume both cameras can operate independently.
11031 info->conflicting_devices = NULL;
11032 info->conflicting_devices_length = 0;
11033
11034 //resource cost is 100 * MIN(1.0, m/M),
11035 //where m is throughput requirement with maximum stream configuration
11036 //and M is CPP maximum throughput.
11037 float max_fps = 0.0;
11038 for (uint32_t i = 0;
11039 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11040 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11041 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11042 }
11043 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11044 gCamCapability[cameraId]->active_array_size.width *
11045 gCamCapability[cameraId]->active_array_size.height * max_fps /
11046 gCamCapability[cameraId]->max_pixel_bandwidth;
11047 info->resource_cost = 100 * MIN(1.0, ratio);
11048 LOGI("camera %d resource cost is %d", cameraId,
11049 info->resource_cost);
11050
11051 pthread_mutex_unlock(&gCamLock);
11052 return rc;
11053}
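/* Editorial sketch of the resource cost formula above, with purely hypothetical numbers:
 * a 4000x3000 active array at max_fps = 30, MAX_PROCESSED_STREAMS = 2 and a CPP
 * max_pixel_bandwidth of 1.2e9 pixels/s gives
 *
 *     ratio         = 2 * 4000 * 3000 * 30 / 1.2e9 = 0.6
 *     resource_cost = 100 * MIN(1.0, 0.6)          = 60
 *
 * Per the usual camera_common.h convention, the framework limits the summed cost of
 * concurrently opened cameras to 100, so two such cameras could not be opened at once.
 */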
11054
11055/*===========================================================================
11056 * FUNCTION : translateCapabilityToMetadata
11057 *
11058 * DESCRIPTION: translate the capability into camera_metadata_t
11059 *
11060 * PARAMETERS : @type : the request template type (CAMERA3_TEMPLATE_*)
11061 *
11062 *
11063 * RETURN : success: camera_metadata_t*
11064 * failure: NULL
11065 *
11066 *==========================================================================*/
11067camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11068{
11069 if (mDefaultMetadata[type] != NULL) {
11070 return mDefaultMetadata[type];
11071 }
11072 //first time we are handling this request
11073 //fill up the metadata structure using the wrapper class
11074 CameraMetadata settings;
11075 //translate from cam_capability_t to camera_metadata_tag_t
11076 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11077 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11078 int32_t defaultRequestID = 0;
11079 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11080
11081 /* OIS disable */
11082 char ois_prop[PROPERTY_VALUE_MAX];
11083 memset(ois_prop, 0, sizeof(ois_prop));
11084 property_get("persist.camera.ois.disable", ois_prop, "0");
11085 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11086
11087 /* Force video to use OIS */
11088 char videoOisProp[PROPERTY_VALUE_MAX];
11089 memset(videoOisProp, 0, sizeof(videoOisProp));
11090 property_get("persist.camera.ois.video", videoOisProp, "1");
11091 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011092
11093 // Hybrid AE enable/disable
11094 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11095 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11096 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11097 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11098
Thierry Strudel3d639192016-09-09 11:52:26 -070011099 uint8_t controlIntent = 0;
11100 uint8_t focusMode;
11101 uint8_t vsMode;
11102 uint8_t optStabMode;
11103 uint8_t cacMode;
11104 uint8_t edge_mode;
11105 uint8_t noise_red_mode;
11106 uint8_t tonemap_mode;
11107 bool highQualityModeEntryAvailable = FALSE;
11108 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011109 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011110 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11111 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011112 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011113 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011114 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011115
Thierry Strudel3d639192016-09-09 11:52:26 -070011116 switch (type) {
11117 case CAMERA3_TEMPLATE_PREVIEW:
11118 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11119 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11120 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11121 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11122 edge_mode = ANDROID_EDGE_MODE_FAST;
11123 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11124 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11125 break;
11126 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11127 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11128 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11129 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11130 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11131 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11132 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11133 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11134 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11135 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11136 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11137 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11138 highQualityModeEntryAvailable = TRUE;
11139 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11140 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11141 fastModeEntryAvailable = TRUE;
11142 }
11143 }
11144 if (highQualityModeEntryAvailable) {
11145 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11146 } else if (fastModeEntryAvailable) {
11147 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11148 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011149 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11150 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11151 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011152 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011153 break;
11154 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11155 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11156 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11157 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011158 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11159 edge_mode = ANDROID_EDGE_MODE_FAST;
11160 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11161 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11162 if (forceVideoOis)
11163 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11164 break;
11165 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11166 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11167 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11168 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011169 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11170 edge_mode = ANDROID_EDGE_MODE_FAST;
11171 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11172 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11173 if (forceVideoOis)
11174 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11175 break;
11176 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11177 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11178 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11179 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11180 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11181 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11182 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11183 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11184 break;
11185 case CAMERA3_TEMPLATE_MANUAL:
11186 edge_mode = ANDROID_EDGE_MODE_FAST;
11187 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11188 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11189 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11190 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11191 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11192 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11193 break;
11194 default:
11195 edge_mode = ANDROID_EDGE_MODE_FAST;
11196 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11197 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11198 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11199 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11200 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11201 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11202 break;
11203 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011204 // Set CAC to OFF if the underlying device doesn't support it
11205 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11206 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11207 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011208 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11209 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11210 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11211 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11212 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11213 }
11214 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011215 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011216 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011217
11218 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11219 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11220 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11221 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11222 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11223 || ois_disable)
11224 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11225 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011226 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011227
11228 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11229 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11230
11231 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11232 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11233
11234 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11235 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11236
11237 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11238 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11239
11240 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11241 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11242
11243 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11244 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11245
11246 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11247 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11248
11249 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11250 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11251
11252 /*flash*/
11253 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11254 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11255
11256 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11257 settings.update(ANDROID_FLASH_FIRING_POWER,
11258 &flashFiringLevel, 1);
11259
11260 /* lens */
11261 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11262 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11263
11264 if (gCamCapability[mCameraId]->filter_densities_count) {
11265 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11266 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11267 gCamCapability[mCameraId]->filter_densities_count);
11268 }
11269
11270 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11271 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11272
Thierry Strudel3d639192016-09-09 11:52:26 -070011273 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11274 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11275
11276 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11277 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11278
11279 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11280 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11281
11282 /* face detection (default to OFF) */
11283 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11284 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11285
Thierry Strudel54dc9782017-02-15 12:12:10 -080011286 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11287 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011288
11289 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11290 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11291
11292 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11293 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11294
Thierry Strudel3d639192016-09-09 11:52:26 -070011295
11296 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11297 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11298
11299 /* Exposure time (default to the minimum supported exposure time) */
11300 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11301 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11302
11303 /* frame duration */
11304 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11305 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11306
11307 /* sensitivity */
11308 static const int32_t default_sensitivity = 100;
11309 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011310#ifndef USE_HAL_3_3
11311 static const int32_t default_isp_sensitivity =
11312 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11313 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11314#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011315
11316 /*edge mode*/
11317 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11318
11319 /*noise reduction mode*/
11320 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11321
11322 /*color correction mode*/
11323 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11324 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11325
11326 /*tonemap mode*/
11327 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11328
11329 int32_t scaler_crop_region[4];
11330 scaler_crop_region[0] = 0;
11331 scaler_crop_region[1] = 0;
11332 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11333 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11334 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11335
11336 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11337 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11338
11339 /*focus distance*/
11340 float focus_distance = 0.0;
11341 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11342
11343 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011344 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011345 float max_range = 0.0;
11346 float max_fixed_fps = 0.0;
11347 int32_t fps_range[2] = {0, 0};
11348 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11349 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011350 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11351 TEMPLATE_MAX_PREVIEW_FPS) {
11352 continue;
11353 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011354 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11355 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11356 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11357 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11358 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11359 if (range > max_range) {
11360 fps_range[0] =
11361 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11362 fps_range[1] =
11363 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11364 max_range = range;
11365 }
11366 } else {
11367 if (range < 0.01 && max_fixed_fps <
11368 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11369 fps_range[0] =
11370 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11371 fps_range[1] =
11372 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11373 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11374 }
11375 }
11376 }
11377 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
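// Editorial sketch of the selection above, assuming TEMPLATE_MAX_PREVIEW_FPS is 30 as the
// comment before the loop states, and a hypothetical fps_ranges_tbl of {[15,30], [30,30], [7.5,60]}:
// the [7.5,60] entry is skipped (max_fps above the template limit), preview/still/ZSL templates
// pick the widest remaining range [15,30], and video-style templates pick the highest fixed
// range [30,30].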
11378
11379 /*precapture trigger*/
11380 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11381 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11382
11383 /*af trigger*/
11384 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11385 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11386
11387 /* ae & af regions */
11388 int32_t active_region[] = {
11389 gCamCapability[mCameraId]->active_array_size.left,
11390 gCamCapability[mCameraId]->active_array_size.top,
11391 gCamCapability[mCameraId]->active_array_size.left +
11392 gCamCapability[mCameraId]->active_array_size.width,
11393 gCamCapability[mCameraId]->active_array_size.top +
11394 gCamCapability[mCameraId]->active_array_size.height,
11395 0};
11396 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11397 sizeof(active_region) / sizeof(active_region[0]));
11398 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11399 sizeof(active_region) / sizeof(active_region[0]));
11400
11401 /* black level lock */
11402 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11403 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11404
Thierry Strudel3d639192016-09-09 11:52:26 -070011405 //special defaults for manual template
11406 if (type == CAMERA3_TEMPLATE_MANUAL) {
11407 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11408 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11409
11410 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11411 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11412
11413 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11414 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11415
11416 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11417 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11418
11419 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11420 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11421
11422 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11423 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11424 }
11425
11426
11427 /* TNR
11428 * This is where we decide for which templates TNR is enabled.
11429 * TNR is turned on if either the preview or the video stream requires it.
11430 * This is not to be confused with per-stream linking; that decision is still
11431 * made per session and is handled as part of stream configuration.
11432 */
11433 uint8_t tnr_enable = 0;
11434
11435 if (m_bTnrPreview || m_bTnrVideo) {
11436
11437 switch (type) {
11438 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11439 tnr_enable = 1;
11440 break;
11441
11442 default:
11443 tnr_enable = 0;
11444 break;
11445 }
11446
11447 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11448 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11449 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11450
11451 LOGD("TNR:%d with process plate %d for template:%d",
11452 tnr_enable, tnr_process_type, type);
11453 }
11454
11455 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011456 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011457 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11458
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011459 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011460 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11461
Shuzhen Wang920ea402017-05-03 08:49:39 -070011462 uint8_t related_camera_id = mCameraId;
11463 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011464
11465 /* CDS default */
11466 char prop[PROPERTY_VALUE_MAX];
11467 memset(prop, 0, sizeof(prop));
11468 property_get("persist.camera.CDS", prop, "Auto");
11469 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11470 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11471 if (CAM_CDS_MODE_MAX == cds_mode) {
11472 cds_mode = CAM_CDS_MODE_AUTO;
11473 }
11474
11475 /* Disabling CDS in templates which have TNR enabled*/
11476 if (tnr_enable)
11477 cds_mode = CAM_CDS_MODE_OFF;
11478
11479 int32_t mode = cds_mode;
11480 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011481
Thierry Strudel269c81a2016-10-12 12:13:59 -070011482 /* Manual Convergence AEC Speed is disabled by default*/
11483 float default_aec_speed = 0;
11484 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11485
11486 /* Manual Convergence AWB Speed is disabled by default*/
11487 float default_awb_speed = 0;
11488 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11489
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011490 // Set instant AEC to normal convergence by default
11491 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11492 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11493
Shuzhen Wang19463d72016-03-08 11:09:52 -080011494 /* hybrid ae */
11495 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11496
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011497 if (gExposeEnableZslKey) {
11498 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11499 }
11500
Thierry Strudel3d639192016-09-09 11:52:26 -070011501 mDefaultMetadata[type] = settings.release();
11502
11503 return mDefaultMetadata[type];
11504}
11505
11506/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011507 * FUNCTION : getExpectedFrameDuration
11508 *
11509 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11510 * duration
11511 *
11512 * PARAMETERS :
11513 * @request : request settings
11514 * @frameDuration : The maximum frame duration in nanoseconds
11515 *
11516 * RETURN : None
11517 *==========================================================================*/
11518void QCamera3HardwareInterface::getExpectedFrameDuration(
11519 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11520 if (nullptr == frameDuration) {
11521 return;
11522 }
11523
11524 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11525 find_camera_metadata_ro_entry(request,
11526 ANDROID_SENSOR_EXPOSURE_TIME,
11527 &e);
11528 if (e.count > 0) {
11529 *frameDuration = e.data.i64[0];
11530 }
11531 find_camera_metadata_ro_entry(request,
11532 ANDROID_SENSOR_FRAME_DURATION,
11533 &e);
11534 if (e.count > 0) {
11535 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11536 }
11537}
11538
11539/*===========================================================================
11540 * FUNCTION : calculateMaxExpectedDuration
11541 *
11542 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11543 * current camera settings.
11544 *
11545 * PARAMETERS :
11546 * @request : request settings
11547 *
11548 * RETURN : Expected frame duration in nanoseconds.
11549 *==========================================================================*/
11550nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11551 const camera_metadata_t *request) {
11552 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11553 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11554 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11555 if (e.count == 0) {
11556 return maxExpectedDuration;
11557 }
11558
11559 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11560 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11561 }
11562
11563 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11564 return maxExpectedDuration;
11565 }
11566
11567 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11568 if (e.count == 0) {
11569 return maxExpectedDuration;
11570 }
11571
11572 switch (e.data.u8[0]) {
11573 case ANDROID_CONTROL_AE_MODE_OFF:
11574 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11575 break;
11576 default:
11577 find_camera_metadata_ro_entry(request,
11578 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11579 &e);
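            // ANDROID_CONTROL_AE_TARGET_FPS_RANGE is an int32 [min, max] pair;
            // the longest expected frame duration corresponds to the minimum FPS.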
11580 if (e.count > 1) {
11581                 maxExpectedDuration = 1e9 / e.data.i32[0];
11582 }
11583 break;
11584 }
11585
11586 return maxExpectedDuration;
11587}
11588
11589/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011590 * FUNCTION : setFrameParameters
11591 *
11592 * DESCRIPTION: set parameters per frame as requested in the metadata from
11593 * framework
11594 *
11595 * PARAMETERS :
11596 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011597 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011598 * @blob_request: Whether this request is a blob request or not
11599 *
11600 * RETURN : success: NO_ERROR
11601 * failure:
11602 *==========================================================================*/
11603int QCamera3HardwareInterface::setFrameParameters(
11604 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011605 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011606 int blob_request,
11607 uint32_t snapshotStreamId)
11608{
11609 /*translate from camera_metadata_t type to parm_type_t*/
11610 int rc = 0;
11611 int32_t hal_version = CAM_HAL_V3;
11612
11613 clear_metadata_buffer(mParameters);
11614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11615 LOGE("Failed to set hal version in the parameters");
11616 return BAD_VALUE;
11617 }
11618
11619 /*we need to update the frame number in the parameters*/
11620 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11621 request->frame_number)) {
11622 LOGE("Failed to set the frame number in the parameters");
11623 return BAD_VALUE;
11624 }
11625
11626 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011627 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011628 LOGE("Failed to set stream type mask in the parameters");
11629 return BAD_VALUE;
11630 }
11631
11632 if (mUpdateDebugLevel) {
11633 uint32_t dummyDebugLevel = 0;
11634         /* The value of dummyDebugLevel is irrelevant. On
11635          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11636 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11637 dummyDebugLevel)) {
11638 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11639 return BAD_VALUE;
11640 }
11641 mUpdateDebugLevel = false;
11642 }
11643
11644 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011645 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011646 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11647 if (blob_request)
11648 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11649 }
11650
11651 return rc;
11652}
11653
11654/*===========================================================================
11655 * FUNCTION : setReprocParameters
11656 *
11657 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11658 * return it.
11659 *
11660 * PARAMETERS :
11661 * @request : request that needs to be serviced
11662 *
11663 * RETURN : success: NO_ERROR
11664 * failure:
11665 *==========================================================================*/
11666int32_t QCamera3HardwareInterface::setReprocParameters(
11667 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11668 uint32_t snapshotStreamId)
11669{
11670 /*translate from camera_metadata_t type to parm_type_t*/
11671 int rc = 0;
11672
11673 if (NULL == request->settings){
11674 LOGE("Reprocess settings cannot be NULL");
11675 return BAD_VALUE;
11676 }
11677
11678 if (NULL == reprocParam) {
11679 LOGE("Invalid reprocessing metadata buffer");
11680 return BAD_VALUE;
11681 }
11682 clear_metadata_buffer(reprocParam);
11683
11684 /*we need to update the frame number in the parameters*/
11685 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11686 request->frame_number)) {
11687 LOGE("Failed to set the frame number in the parameters");
11688 return BAD_VALUE;
11689 }
11690
11691 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11692 if (rc < 0) {
11693 LOGE("Failed to translate reproc request");
11694 return rc;
11695 }
11696
11697 CameraMetadata frame_settings;
11698 frame_settings = request->settings;
11699 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11700 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11701 int32_t *crop_count =
11702 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11703 int32_t *crop_data =
11704 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11705 int32_t *roi_map =
11706 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11707 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
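            // Only the first crop rectangle and its ROI map are propagated to
            // the reprocess stream.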
11708 cam_crop_data_t crop_meta;
11709 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11710 crop_meta.num_of_streams = 1;
11711 crop_meta.crop_info[0].crop.left = crop_data[0];
11712 crop_meta.crop_info[0].crop.top = crop_data[1];
11713 crop_meta.crop_info[0].crop.width = crop_data[2];
11714 crop_meta.crop_info[0].crop.height = crop_data[3];
11715
11716 crop_meta.crop_info[0].roi_map.left =
11717 roi_map[0];
11718 crop_meta.crop_info[0].roi_map.top =
11719 roi_map[1];
11720 crop_meta.crop_info[0].roi_map.width =
11721 roi_map[2];
11722 crop_meta.crop_info[0].roi_map.height =
11723 roi_map[3];
11724
11725 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11726 rc = BAD_VALUE;
11727 }
11728 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11729 request->input_buffer->stream,
11730 crop_meta.crop_info[0].crop.left,
11731 crop_meta.crop_info[0].crop.top,
11732 crop_meta.crop_info[0].crop.width,
11733 crop_meta.crop_info[0].crop.height);
11734 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11735 request->input_buffer->stream,
11736 crop_meta.crop_info[0].roi_map.left,
11737 crop_meta.crop_info[0].roi_map.top,
11738 crop_meta.crop_info[0].roi_map.width,
11739 crop_meta.crop_info[0].roi_map.height);
11740 } else {
11741 LOGE("Invalid reprocess crop count %d!", *crop_count);
11742 }
11743 } else {
11744 LOGE("No crop data from matching output stream");
11745 }
11746
11747 /* These settings are not needed for regular requests so handle them specially for
11748 reprocess requests; information needed for EXIF tags */
11749 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11750 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11751 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11752 if (NAME_NOT_FOUND != val) {
11753 uint32_t flashMode = (uint32_t)val;
11754 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11755 rc = BAD_VALUE;
11756 }
11757 } else {
11758 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11759 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11760 }
11761 } else {
11762 LOGH("No flash mode in reprocess settings");
11763 }
11764
11765 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11766 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11767 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11768 rc = BAD_VALUE;
11769 }
11770 } else {
11771 LOGH("No flash state in reprocess settings");
11772 }
11773
11774 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11775 uint8_t *reprocessFlags =
11776 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11778 *reprocessFlags)) {
11779 rc = BAD_VALUE;
11780 }
11781 }
11782
Thierry Strudel54dc9782017-02-15 12:12:10 -080011783 // Add exif debug data to internal metadata
11784 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11785 mm_jpeg_debug_exif_params_t *debug_params =
11786 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11787 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11788 // AE
11789 if (debug_params->ae_debug_params_valid == TRUE) {
11790 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11791 debug_params->ae_debug_params);
11792 }
11793 // AWB
11794 if (debug_params->awb_debug_params_valid == TRUE) {
11795 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11796 debug_params->awb_debug_params);
11797 }
11798 // AF
11799 if (debug_params->af_debug_params_valid == TRUE) {
11800 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11801 debug_params->af_debug_params);
11802 }
11803 // ASD
11804 if (debug_params->asd_debug_params_valid == TRUE) {
11805 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11806 debug_params->asd_debug_params);
11807 }
11808 // Stats
11809 if (debug_params->stats_debug_params_valid == TRUE) {
11810 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11811 debug_params->stats_debug_params);
11812 }
11813 // BE Stats
11814 if (debug_params->bestats_debug_params_valid == TRUE) {
11815 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11816 debug_params->bestats_debug_params);
11817 }
11818 // BHIST
11819 if (debug_params->bhist_debug_params_valid == TRUE) {
11820 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11821 debug_params->bhist_debug_params);
11822 }
11823 // 3A Tuning
11824 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11825 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11826 debug_params->q3a_tuning_debug_params);
11827 }
11828 }
11829
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011830 // Add metadata which reprocess needs
11831 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11832 cam_reprocess_info_t *repro_info =
11833 (cam_reprocess_info_t *)frame_settings.find
11834 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011835 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011836 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011837 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011838 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011839 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011840 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011841 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011842 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011843 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011844 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011845 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011846 repro_info->pipeline_flip);
11847 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11848 repro_info->af_roi);
11849 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11850 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011851        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11852           CAM_INTF_PARM_ROTATION metadata has already been added in
11853           translateToHalMetadata, and HAL needs to keep this new rotation
11854           metadata. Otherwise, the old rotation info saved in the vendor tag
11855           would be used */
11856 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11857 CAM_INTF_PARM_ROTATION, reprocParam) {
11858 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11859 } else {
11860 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011861 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011862 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011863 }
11864
11865     /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11866        to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11867        roi.width and roi.height are the final JPEG size.
11868        For now, HAL only checks this for reprocess requests */
11869 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11870 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11871 uint8_t *enable =
11872 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11873 if (*enable == TRUE) {
11874 int32_t *crop_data =
11875 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11876 cam_stream_crop_info_t crop_meta;
11877 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11878 crop_meta.stream_id = 0;
11879 crop_meta.crop.left = crop_data[0];
11880 crop_meta.crop.top = crop_data[1];
11881 crop_meta.crop.width = crop_data[2];
11882 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011883 // The JPEG crop roi should match cpp output size
11884 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11885 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11886 crop_meta.roi_map.left = 0;
11887 crop_meta.roi_map.top = 0;
11888 crop_meta.roi_map.width = cpp_crop->crop.width;
11889 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011890 }
11891 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11892 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011893 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011894 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011895 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11896 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011897 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011898 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11899
11900 // Add JPEG scale information
11901 cam_dimension_t scale_dim;
11902 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11903 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11904 int32_t *roi =
11905 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11906 scale_dim.width = roi[2];
11907 scale_dim.height = roi[3];
11908 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11909 scale_dim);
11910 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11911 scale_dim.width, scale_dim.height, mCameraId);
11912 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011913 }
11914 }
11915
11916 return rc;
11917}
11918
11919/*===========================================================================
11920 * FUNCTION : saveRequestSettings
11921 *
11922 * DESCRIPTION: Add any settings that might have changed to the request settings
11923 * and save the settings to be applied on the frame
11924 *
11925 * PARAMETERS :
11926 * @jpegMetadata : the extracted and/or modified jpeg metadata
11927 * @request : request with initial settings
11928 *
11929 * RETURN :
11930 * camera_metadata_t* : pointer to the saved request settings
11931 *==========================================================================*/
11932camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11933 const CameraMetadata &jpegMetadata,
11934 camera3_capture_request_t *request)
11935{
11936 camera_metadata_t *resultMetadata;
11937 CameraMetadata camMetadata;
11938 camMetadata = request->settings;
11939
11940 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11941 int32_t thumbnail_size[2];
11942 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11943 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11944 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11945 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11946 }
11947
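    // Tag the saved settings as a reprocess request when an input buffer is present.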
11948 if (request->input_buffer != NULL) {
11949 uint8_t reprocessFlags = 1;
11950 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11951 (uint8_t*)&reprocessFlags,
11952 sizeof(reprocessFlags));
11953 }
11954
11955 resultMetadata = camMetadata.release();
11956 return resultMetadata;
11957}
11958
11959/*===========================================================================
11960 * FUNCTION : setHalFpsRange
11961 *
11962 * DESCRIPTION: set FPS range parameter
11963 *
11964 *
11965 * PARAMETERS :
11966 * @settings : Metadata from framework
11967 * @hal_metadata: Metadata buffer
11968 *
11969 *
11970 * RETURN : success: NO_ERROR
11971 * failure:
11972 *==========================================================================*/
11973int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11974 metadata_buffer_t *hal_metadata)
11975{
11976 int32_t rc = NO_ERROR;
11977 cam_fps_range_t fps_range;
11978 fps_range.min_fps = (float)
11979 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11980 fps_range.max_fps = (float)
11981 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11982 fps_range.video_min_fps = fps_range.min_fps;
11983 fps_range.video_max_fps = fps_range.max_fps;
11984
11985 LOGD("aeTargetFpsRange fps: [%f %f]",
11986 fps_range.min_fps, fps_range.max_fps);
11987 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11988 * follows:
11989 * ---------------------------------------------------------------|
11990 * Video stream is absent in configure_streams |
11991 * (Camcorder preview before the first video record |
11992 * ---------------------------------------------------------------|
11993 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11994 * | | | vid_min/max_fps|
11995 * ---------------------------------------------------------------|
11996 * NO | [ 30, 240] | 240 | [240, 240] |
11997 * |-------------|-------------|----------------|
11998 * | [240, 240] | 240 | [240, 240] |
11999 * ---------------------------------------------------------------|
12000 * Video stream is present in configure_streams |
12001 * ---------------------------------------------------------------|
12002 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12003 * | | | vid_min/max_fps|
12004 * ---------------------------------------------------------------|
12005 * NO | [ 30, 240] | 240 | [240, 240] |
12006 * (camcorder prev |-------------|-------------|----------------|
12007 * after video rec | [240, 240] | 240 | [240, 240] |
12008 * is stopped) | | | |
12009 * ---------------------------------------------------------------|
12010 * YES | [ 30, 240] | 240 | [240, 240] |
12011 * |-------------|-------------|----------------|
12012 * | [240, 240] | 240 | [240, 240] |
12013 * ---------------------------------------------------------------|
12014 * When Video stream is absent in configure_streams,
12015 * preview fps = sensor_fps / batchsize
12016 * Eg: for 240fps at batchSize 4, preview = 60fps
12017 * for 120fps at batchSize 4, preview = 30fps
12018 *
12019 * When video stream is present in configure_streams, preview fps is as per
12020 * the ratio of preview buffers to video buffers requested in process
12021 * capture request
12022 */
12023 mBatchSize = 0;
12024 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
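        // In constrained high speed mode the sensor runs at the max FPS, so the
        // minimum FPS is forced up to match (see the table above).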
12025 fps_range.min_fps = fps_range.video_max_fps;
12026 fps_range.video_min_fps = fps_range.video_max_fps;
12027 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12028 fps_range.max_fps);
12029 if (NAME_NOT_FOUND != val) {
12030 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12031 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12032 return BAD_VALUE;
12033 }
12034
12035 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12036 /* If batchmode is currently in progress and the fps changes,
12037 * set the flag to restart the sensor */
12038 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12039 (mHFRVideoFps != fps_range.max_fps)) {
12040 mNeedSensorRestart = true;
12041 }
12042 mHFRVideoFps = fps_range.max_fps;
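                // Batch size is the ratio of the sensor (HFR) FPS to the HFR
                // preview FPS, capped at MAX_HFR_BATCH_SIZE.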
12043 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12044 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12045 mBatchSize = MAX_HFR_BATCH_SIZE;
12046 }
12047 }
12048 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12049
12050 }
12051 } else {
12052 /* HFR mode is session param in backend/ISP. This should be reset when
12053 * in non-HFR mode */
12054 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12055 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12056 return BAD_VALUE;
12057 }
12058 }
12059 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12060 return BAD_VALUE;
12061 }
12062 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12063 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12064 return rc;
12065}
12066
12067/*===========================================================================
12068 * FUNCTION : translateToHalMetadata
12069 *
12070 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12071 *
12072 *
12073 * PARAMETERS :
12074 * @request : request sent from framework
12075 *
12076 *
12077 * RETURN : success: NO_ERROR
12078 * failure:
12079 *==========================================================================*/
12080int QCamera3HardwareInterface::translateToHalMetadata
12081 (const camera3_capture_request_t *request,
12082 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012083 uint32_t snapshotStreamId) {
12084 if (request == nullptr || hal_metadata == nullptr) {
12085 return BAD_VALUE;
12086 }
12087
12088 int64_t minFrameDuration = getMinFrameDuration(request);
12089
12090 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12091 minFrameDuration);
12092}
12093
12094int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12095 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12096 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12097
Thierry Strudel3d639192016-09-09 11:52:26 -070012098 int rc = 0;
12099 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012100 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012101
12102 /* Do not change the order of the following list unless you know what you are
12103 * doing.
12104 * The order is laid out in such a way that parameters in the front of the table
12105 * may be used to override the parameters later in the table. Examples are:
12106 * 1. META_MODE should precede AEC/AWB/AF MODE
12107     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12108     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12109     * 4. Any mode should precede its corresponding settings
12110 */
12111 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12112 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12113 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12114 rc = BAD_VALUE;
12115 }
12116 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12117 if (rc != NO_ERROR) {
12118 LOGE("extractSceneMode failed");
12119 }
12120 }
12121
12122 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12123 uint8_t fwk_aeMode =
12124 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12125 uint8_t aeMode;
12126 int32_t redeye;
12127
12128 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12129 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012130 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12131 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012132 } else {
12133 aeMode = CAM_AE_MODE_ON;
12134 }
12135 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12136 redeye = 1;
12137 } else {
12138 redeye = 0;
12139 }
12140
12141 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12142 fwk_aeMode);
12143 if (NAME_NOT_FOUND != val) {
12144 int32_t flashMode = (int32_t)val;
12145 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12146 }
12147
12148 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12149 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12150 rc = BAD_VALUE;
12151 }
12152 }
12153
12154 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12155 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12156 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12157 fwk_whiteLevel);
12158 if (NAME_NOT_FOUND != val) {
12159 uint8_t whiteLevel = (uint8_t)val;
12160 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12161 rc = BAD_VALUE;
12162 }
12163 }
12164 }
12165
12166 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12167 uint8_t fwk_cacMode =
12168 frame_settings.find(
12169 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12170 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12171 fwk_cacMode);
12172 if (NAME_NOT_FOUND != val) {
12173 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12174 bool entryAvailable = FALSE;
12175 // Check whether Frameworks set CAC mode is supported in device or not
12176 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12177 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12178 entryAvailable = TRUE;
12179 break;
12180 }
12181 }
12182 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12183 // If entry not found then set the device supported mode instead of frameworks mode i.e,
12184 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12185 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12186 if (entryAvailable == FALSE) {
12187 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12188 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12189 } else {
12190 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12191                     // High is not supported, so set FAST since the spec says the underlying
12192                     // device implementation can be the same for both modes.
12193 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12194 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12195                     // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
12196                     // to avoid the fps drop that high quality would cause
12197 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12198 } else {
12199 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12200 }
12201 }
12202 }
12203 LOGD("Final cacMode is %d", cacMode);
12204 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12205 rc = BAD_VALUE;
12206 }
12207 } else {
12208 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12209 }
12210 }
12211
Jason Lee84ae9972017-02-24 13:24:24 -080012212 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012213 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012214 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012215 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012216 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12217 fwk_focusMode);
12218 if (NAME_NOT_FOUND != val) {
12219 uint8_t focusMode = (uint8_t)val;
12220 LOGD("set focus mode %d", focusMode);
12221 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12222 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12223 rc = BAD_VALUE;
12224 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012225 }
12226 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012227 } else {
12228 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12229 LOGE("Focus forced to infinity %d", focusMode);
12230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12231 rc = BAD_VALUE;
12232 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012233 }
12234
Jason Lee84ae9972017-02-24 13:24:24 -080012235 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12236 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012237 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12239 focalDistance)) {
12240 rc = BAD_VALUE;
12241 }
12242 }
12243
12244 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12245 uint8_t fwk_antibandingMode =
12246 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12247 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12248 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12249 if (NAME_NOT_FOUND != val) {
12250 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012251 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12252 if (m60HzZone) {
12253 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12254 } else {
12255 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12256 }
12257 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012258 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12259 hal_antibandingMode)) {
12260 rc = BAD_VALUE;
12261 }
12262 }
12263 }
12264
12265 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12266 int32_t expCompensation = frame_settings.find(
12267 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12268 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12269 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12270 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12271 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012272 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12274 expCompensation)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278
12279 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12280 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12282 rc = BAD_VALUE;
12283 }
12284 }
12285 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12286 rc = setHalFpsRange(frame_settings, hal_metadata);
12287 if (rc != NO_ERROR) {
12288 LOGE("setHalFpsRange failed");
12289 }
12290 }
12291
12292 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12293 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12294 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12295 rc = BAD_VALUE;
12296 }
12297 }
12298
12299 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12300 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12301 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12302 fwk_effectMode);
12303 if (NAME_NOT_FOUND != val) {
12304 uint8_t effectMode = (uint8_t)val;
12305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12306 rc = BAD_VALUE;
12307 }
12308 }
12309 }
12310
12311 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12312 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12313 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12314 colorCorrectMode)) {
12315 rc = BAD_VALUE;
12316 }
12317 }
12318
12319 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12320 cam_color_correct_gains_t colorCorrectGains;
12321 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12322 colorCorrectGains.gains[i] =
12323 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12324 }
12325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12326 colorCorrectGains)) {
12327 rc = BAD_VALUE;
12328 }
12329 }
12330
12331 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12332 cam_color_correct_matrix_t colorCorrectTransform;
12333 cam_rational_type_t transform_elem;
12334 size_t num = 0;
12335 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12336 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12337 transform_elem.numerator =
12338 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12339 transform_elem.denominator =
12340 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12341 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12342 num++;
12343 }
12344 }
12345 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12346 colorCorrectTransform)) {
12347 rc = BAD_VALUE;
12348 }
12349 }
12350
12351 cam_trigger_t aecTrigger;
12352 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12353 aecTrigger.trigger_id = -1;
12354 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12355 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12356 aecTrigger.trigger =
12357 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12358 aecTrigger.trigger_id =
12359 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12360 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12361 aecTrigger)) {
12362 rc = BAD_VALUE;
12363 }
12364 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12365 aecTrigger.trigger, aecTrigger.trigger_id);
12366 }
12367
12368 /*af_trigger must come with a trigger id*/
12369 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12370 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12371 cam_trigger_t af_trigger;
12372 af_trigger.trigger =
12373 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12374 af_trigger.trigger_id =
12375 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12376 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12377 rc = BAD_VALUE;
12378 }
12379 LOGD("AfTrigger: %d AfTriggerID: %d",
12380 af_trigger.trigger, af_trigger.trigger_id);
12381 }
12382
12383 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12384 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12385 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12386 rc = BAD_VALUE;
12387 }
12388 }
12389 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12390 cam_edge_application_t edge_application;
12391 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012392
Thierry Strudel3d639192016-09-09 11:52:26 -070012393 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12394 edge_application.sharpness = 0;
12395 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012396 edge_application.sharpness =
12397 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12398 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12399 int32_t sharpness =
12400 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12401 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12402 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12403 LOGD("Setting edge mode sharpness %d", sharpness);
12404 edge_application.sharpness = sharpness;
12405 }
12406 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012407 }
12408 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12409 rc = BAD_VALUE;
12410 }
12411 }
12412
12413 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12414 int32_t respectFlashMode = 1;
12415 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12416 uint8_t fwk_aeMode =
12417 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012418 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12419 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12420 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012421 respectFlashMode = 0;
12422 LOGH("AE Mode controls flash, ignore android.flash.mode");
12423 }
12424 }
12425 if (respectFlashMode) {
12426 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12427 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12428 LOGH("flash mode after mapping %d", val);
12429 // To check: CAM_INTF_META_FLASH_MODE usage
12430 if (NAME_NOT_FOUND != val) {
12431 uint8_t flashMode = (uint8_t)val;
12432 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12433 rc = BAD_VALUE;
12434 }
12435 }
12436 }
12437 }
12438
12439 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12440 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12441 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12442 rc = BAD_VALUE;
12443 }
12444 }
12445
12446 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12447 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12448 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12449 flashFiringTime)) {
12450 rc = BAD_VALUE;
12451 }
12452 }
12453
12454 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12455 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12457 hotPixelMode)) {
12458 rc = BAD_VALUE;
12459 }
12460 }
12461
12462 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12463 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12464 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12465 lensAperture)) {
12466 rc = BAD_VALUE;
12467 }
12468 }
12469
12470 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12471 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12472 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12473 filterDensity)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12479 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12480 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12481 focalLength)) {
12482 rc = BAD_VALUE;
12483 }
12484 }
12485
12486 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12487 uint8_t optStabMode =
12488 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12490 optStabMode)) {
12491 rc = BAD_VALUE;
12492 }
12493 }
12494
12495 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12496 uint8_t videoStabMode =
12497 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12498 LOGD("videoStabMode from APP = %d", videoStabMode);
12499 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12500 videoStabMode)) {
12501 rc = BAD_VALUE;
12502 }
12503 }
12504
12505
12506 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12507 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12508 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12509 noiseRedMode)) {
12510 rc = BAD_VALUE;
12511 }
12512 }
12513
12514 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12515 float reprocessEffectiveExposureFactor =
12516 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12517 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12518 reprocessEffectiveExposureFactor)) {
12519 rc = BAD_VALUE;
12520 }
12521 }
12522
12523 cam_crop_region_t scalerCropRegion;
12524 bool scalerCropSet = false;
12525 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12526 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12527 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12528 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12529 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12530
12531 // Map coordinate system from active array to sensor output.
12532 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12533 scalerCropRegion.width, scalerCropRegion.height);
12534
12535 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12536 scalerCropRegion)) {
12537 rc = BAD_VALUE;
12538 }
12539 scalerCropSet = true;
12540 }
12541
12542 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12543 int64_t sensorExpTime =
12544 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12545 LOGD("setting sensorExpTime %lld", sensorExpTime);
12546 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12547 sensorExpTime)) {
12548 rc = BAD_VALUE;
12549 }
12550 }
12551
12552 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12553 int64_t sensorFrameDuration =
12554 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012555 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12556 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12557 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12558 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12559 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12560 sensorFrameDuration)) {
12561 rc = BAD_VALUE;
12562 }
12563 }
12564
12565 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12566 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12567 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12568 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12569 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12570 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12571 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12572 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12573 sensorSensitivity)) {
12574 rc = BAD_VALUE;
12575 }
12576 }
12577
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012578#ifndef USE_HAL_3_3
12579 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12580 int32_t ispSensitivity =
12581 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12582 if (ispSensitivity <
12583 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12584 ispSensitivity =
12585 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12586 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12587 }
12588 if (ispSensitivity >
12589 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12590 ispSensitivity =
12591 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12592 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12593 }
12594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12595 ispSensitivity)) {
12596 rc = BAD_VALUE;
12597 }
12598 }
12599#endif
12600
Thierry Strudel3d639192016-09-09 11:52:26 -070012601 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12602 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12603 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12604 rc = BAD_VALUE;
12605 }
12606 }
12607
12608 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12609 uint8_t fwk_facedetectMode =
12610 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12611
12612 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12613 fwk_facedetectMode);
12614
12615 if (NAME_NOT_FOUND != val) {
12616 uint8_t facedetectMode = (uint8_t)val;
12617 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12618 facedetectMode)) {
12619 rc = BAD_VALUE;
12620 }
12621 }
12622 }
12623
Thierry Strudel54dc9782017-02-15 12:12:10 -080012624 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012625 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012626 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012627 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12628 histogramMode)) {
12629 rc = BAD_VALUE;
12630 }
12631 }
12632
12633 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12634 uint8_t sharpnessMapMode =
12635 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12636 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12637 sharpnessMapMode)) {
12638 rc = BAD_VALUE;
12639 }
12640 }
12641
12642 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12643 uint8_t tonemapMode =
12644 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12645 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12646 rc = BAD_VALUE;
12647 }
12648 }
12649 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12650 /*All tonemap channels will have the same number of points*/
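    /* Each curve is a flat array of (Pin, Pout) pairs, hence the count/2 below */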
12651 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12652 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12653 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12654 cam_rgb_tonemap_curves tonemapCurves;
12655 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12656 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12657 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12658 tonemapCurves.tonemap_points_cnt,
12659 CAM_MAX_TONEMAP_CURVE_SIZE);
12660 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12661 }
12662
12663 /* ch0 = G*/
12664 size_t point = 0;
12665 cam_tonemap_curve_t tonemapCurveGreen;
12666 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12667 for (size_t j = 0; j < 2; j++) {
12668 tonemapCurveGreen.tonemap_points[i][j] =
12669 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12670 point++;
12671 }
12672 }
12673 tonemapCurves.curves[0] = tonemapCurveGreen;
12674
12675 /* ch 1 = B */
12676 point = 0;
12677 cam_tonemap_curve_t tonemapCurveBlue;
12678 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12679 for (size_t j = 0; j < 2; j++) {
12680 tonemapCurveBlue.tonemap_points[i][j] =
12681 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12682 point++;
12683 }
12684 }
12685 tonemapCurves.curves[1] = tonemapCurveBlue;
12686
12687 /* ch 2 = R */
12688 point = 0;
12689 cam_tonemap_curve_t tonemapCurveRed;
12690 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12691 for (size_t j = 0; j < 2; j++) {
12692 tonemapCurveRed.tonemap_points[i][j] =
12693 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12694 point++;
12695 }
12696 }
12697 tonemapCurves.curves[2] = tonemapCurveRed;
12698
12699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12700 tonemapCurves)) {
12701 rc = BAD_VALUE;
12702 }
12703 }
12704
12705 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12706 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12707 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12708 captureIntent)) {
12709 rc = BAD_VALUE;
12710 }
12711 }
12712
12713 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12714 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12716 blackLevelLock)) {
12717 rc = BAD_VALUE;
12718 }
12719 }
12720
12721 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12722 uint8_t lensShadingMapMode =
12723 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12724 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12725 lensShadingMapMode)) {
12726 rc = BAD_VALUE;
12727 }
12728 }
12729
12730 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12731 cam_area_t roi;
12732 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012733 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012734
12735 // Map coordinate system from active array to sensor output.
12736 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12737 roi.rect.height);
12738
12739 if (scalerCropSet) {
12740 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12741 }
12742 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12743 rc = BAD_VALUE;
12744 }
12745 }
12746
12747 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12748 cam_area_t roi;
12749 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012750 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012751
12752 // Map coordinate system from active array to sensor output.
12753 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12754 roi.rect.height);
12755
12756 if (scalerCropSet) {
12757 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12758 }
12759 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12760 rc = BAD_VALUE;
12761 }
12762 }
12763
12764 // CDS for non-HFR non-video mode
12765 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12766 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12767 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12768 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12769 LOGE("Invalid CDS mode %d!", *fwk_cds);
12770 } else {
12771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12772 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12773 rc = BAD_VALUE;
12774 }
12775 }
12776 }
12777
Thierry Strudel04e026f2016-10-10 11:27:36 -070012778 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012779 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012780 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012781 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12782 }
12783 if (m_bVideoHdrEnabled)
12784 vhdr = CAM_VIDEO_HDR_MODE_ON;
12785
Thierry Strudel54dc9782017-02-15 12:12:10 -080012786 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12787
12788 if(vhdr != curr_hdr_state)
12789 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12790
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012791 rc = setVideoHdrMode(mParameters, vhdr);
12792 if (rc != NO_ERROR) {
12793 LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012794 }
12795
12796 //IR
12797 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12798 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12799 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012800 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12801 uint8_t isIRon = 0;
12802
12803 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012804 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12805 LOGE("Invalid IR mode %d!", fwk_ir);
12806 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012807 if(isIRon != curr_ir_state )
12808 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12809
Thierry Strudel04e026f2016-10-10 11:27:36 -070012810 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12811 CAM_INTF_META_IR_MODE, fwk_ir)) {
12812 rc = BAD_VALUE;
12813 }
12814 }
12815 }
12816
Thierry Strudel54dc9782017-02-15 12:12:10 -080012817 //Binning Correction Mode
12818 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12819 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12820 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12821 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12822 || (0 > fwk_binning_correction)) {
12823 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12824 } else {
12825 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12826 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12827 rc = BAD_VALUE;
12828 }
12829 }
12830 }
12831
Thierry Strudel269c81a2016-10-12 12:13:59 -070012832 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12833 float aec_speed;
12834 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12835 LOGD("AEC Speed :%f", aec_speed);
12836 if ( aec_speed < 0 ) {
12837 LOGE("Invalid AEC convergence speed %f!", aec_speed);
12838 } else {
12839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12840 aec_speed)) {
12841 rc = BAD_VALUE;
12842 }
12843 }
12844 }
12845
12846 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12847 float awb_speed;
12848 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12849 LOGD("AWB Speed :%f", awb_speed);
12850 if ( awb_speed < 0 ) {
12851 LOGE("Invalid AWB convergence speed %f!", awb_speed);
12852 } else {
12853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12854 awb_speed)) {
12855 rc = BAD_VALUE;
12856 }
12857 }
12858 }
12859
Thierry Strudel3d639192016-09-09 11:52:26 -070012860 // TNR
12861 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12862 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12863 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012864 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012865 cam_denoise_param_t tnr;
12866 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12867 tnr.process_plates =
12868 (cam_denoise_process_type_t)frame_settings.find(
12869 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12870 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012871
12872 if(b_TnrRequested != curr_tnr_state)
12873 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12874
Thierry Strudel3d639192016-09-09 11:52:26 -070012875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12876 rc = BAD_VALUE;
12877 }
12878 }
12879
Thierry Strudel54dc9782017-02-15 12:12:10 -080012880 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012881 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012882 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012883 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12884 *exposure_metering_mode)) {
12885 rc = BAD_VALUE;
12886 }
12887 }
12888
Thierry Strudel3d639192016-09-09 11:52:26 -070012889 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12890 int32_t fwk_testPatternMode =
12891 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12892 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12893 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12894
12895 if (NAME_NOT_FOUND != testPatternMode) {
12896 cam_test_pattern_data_t testPatternData;
12897 memset(&testPatternData, 0, sizeof(testPatternData));
12898 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12899 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12900 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12901 int32_t *fwk_testPatternData =
12902 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12903 testPatternData.r = fwk_testPatternData[0];
12904 testPatternData.b = fwk_testPatternData[3];
12905 switch (gCamCapability[mCameraId]->color_arrangement) {
12906 case CAM_FILTER_ARRANGEMENT_RGGB:
12907 case CAM_FILTER_ARRANGEMENT_GRBG:
12908 testPatternData.gr = fwk_testPatternData[1];
12909 testPatternData.gb = fwk_testPatternData[2];
12910 break;
12911 case CAM_FILTER_ARRANGEMENT_GBRG:
12912 case CAM_FILTER_ARRANGEMENT_BGGR:
12913 testPatternData.gr = fwk_testPatternData[2];
12914 testPatternData.gb = fwk_testPatternData[1];
12915 break;
12916 default:
12917 LOGE("color arrangement %d is not supported",
12918 gCamCapability[mCameraId]->color_arrangement);
12919 break;
12920 }
12921 }
12922 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12923 testPatternData)) {
12924 rc = BAD_VALUE;
12925 }
12926 } else {
12927 LOGE("Invalid framework sensor test pattern mode %d",
12928 fwk_testPatternMode);
12929 }
12930 }
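    /* Example (illustrative): with ANDROID_SENSOR_TEST_PATTERN_MODE set to
     * SOLID_COLOR and ANDROID_SENSOR_TEST_PATTERN_DATA = [R, Gr, Gb, B],
     * the switch above swaps the two green samples for GBRG/BGGR sensors so
     * that testPatternData.gr/gb line up with the sensor's CFA ordering. */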
12931
12932 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12933 size_t count = 0;
12934 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12935 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12936 gps_coords.data.d, gps_coords.count, count);
12937 if (gps_coords.count != count) {
12938 rc = BAD_VALUE;
12939 }
12940 }
12941
12942 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12943 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12944 size_t count = 0;
12945 const char *gps_methods_src = (const char *)
12946 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12947 memset(gps_methods, '\0', sizeof(gps_methods));
12948 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12949 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12950 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12951 if (GPS_PROCESSING_METHOD_SIZE != count) {
12952 rc = BAD_VALUE;
12953 }
12954 }
12955
12956 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12957 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12959 gps_timestamp)) {
12960 rc = BAD_VALUE;
12961 }
12962 }
12963
12964 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12965 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12966 cam_rotation_info_t rotation_info;
12967 if (orientation == 0) {
12968 rotation_info.rotation = ROTATE_0;
12969 } else if (orientation == 90) {
12970 rotation_info.rotation = ROTATE_90;
12971 } else if (orientation == 180) {
12972 rotation_info.rotation = ROTATE_180;
12973 } else if (orientation == 270) {
12974 rotation_info.rotation = ROTATE_270;
12975 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012976 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012977 rotation_info.streamId = snapshotStreamId;
12978 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12979 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12980 rc = BAD_VALUE;
12981 }
12982 }
12983
12984 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12985 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12987 rc = BAD_VALUE;
12988 }
12989 }
12990
12991 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12992 uint32_t thumb_quality = (uint32_t)
12993 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12995 thumb_quality)) {
12996 rc = BAD_VALUE;
12997 }
12998 }
12999
13000 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13001 cam_dimension_t dim;
13002 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13003 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13005 rc = BAD_VALUE;
13006 }
13007 }
13008
13009 // Internal metadata
13010 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13011 size_t count = 0;
13012 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13013 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13014 privatedata.data.i32, privatedata.count, count);
13015 if (privatedata.count != count) {
13016 rc = BAD_VALUE;
13017 }
13018 }
13019
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013020 // ISO/Exposure Priority
13021 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13022 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13023 cam_priority_mode_t mode =
13024 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13025 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13026 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13027 use_iso_exp_pty.previewOnly = FALSE;
13028 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13029 use_iso_exp_pty.value = *ptr;
13030
13031 if(CAM_ISO_PRIORITY == mode) {
13032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13033 use_iso_exp_pty)) {
13034 rc = BAD_VALUE;
13035 }
13036 }
13037 else {
13038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13039 use_iso_exp_pty)) {
13040 rc = BAD_VALUE;
13041 }
13042 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013043
13044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13045 rc = BAD_VALUE;
13046 }
13047 }
13048 } else {
13049 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13050 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013051 }
13052 }
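    /* Note (descriptive): ZSL is force-enabled (CAM_INTF_PARM_ZSL_MODE = 1)
     * when a valid ISO or exposure-time priority is requested above, and
     * force-disabled when the priority tags are absent. */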
13053
13054 // Saturation
13055 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13056 int32_t* use_saturation =
13057 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13058 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13059 rc = BAD_VALUE;
13060 }
13061 }
13062
Thierry Strudel3d639192016-09-09 11:52:26 -070013063 // EV step
13064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13065 gCamCapability[mCameraId]->exp_compensation_step)) {
13066 rc = BAD_VALUE;
13067 }
13068
13069 // CDS info
13070 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13071 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13072 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13073
13074 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13075 CAM_INTF_META_CDS_DATA, *cdsData)) {
13076 rc = BAD_VALUE;
13077 }
13078 }
13079
Shuzhen Wang19463d72016-03-08 11:09:52 -080013080 // Hybrid AE
13081 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13082 uint8_t *hybrid_ae = (uint8_t *)
13083 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13084
13085 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13086 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13087 rc = BAD_VALUE;
13088 }
13089 }
13090
Shuzhen Wang14415f52016-11-16 18:26:18 -080013091 // Histogram
13092 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13093 uint8_t histogramMode =
13094 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13096 histogramMode)) {
13097 rc = BAD_VALUE;
13098 }
13099 }
13100
13101 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13102 int32_t histogramBins =
13103 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13105 histogramBins)) {
13106 rc = BAD_VALUE;
13107 }
13108 }
13109
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013110 // Tracking AF
13111 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13112 uint8_t trackingAfTrigger =
13113 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13115 trackingAfTrigger)) {
13116 rc = BAD_VALUE;
13117 }
13118 }
13119
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013120 // Makernote
13121 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13122 if (entry.count != 0) {
13123 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13124 cam_makernote_t makernote;
13125 makernote.length = entry.count;
13126 memcpy(makernote.data, entry.data.u8, makernote.length);
13127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13128 rc = BAD_VALUE;
13129 }
13130 } else {
13131 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13132 MAX_MAKERNOTE_LENGTH);
13133 rc = BAD_VALUE;
13134 }
13135 }
13136
Thierry Strudel3d639192016-09-09 11:52:26 -070013137 return rc;
13138}
13139
13140/*===========================================================================
13141 * FUNCTION : captureResultCb
13142 *
13143 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13144 *
13145 * PARAMETERS :
13146 * @frame : frame information from mm-camera-interface
13147 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13148 * @userdata: userdata
13149 *
13150 * RETURN : NONE
13151 *==========================================================================*/
13152void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13153 camera3_stream_buffer_t *buffer,
13154 uint32_t frame_number, bool isInputBuffer, void *userdata)
13155{
13156 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13157 if (hw == NULL) {
13158 LOGE("Invalid hw %p", hw);
13159 return;
13160 }
13161
13162 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13163 return;
13164}
13165
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013166/*===========================================================================
13167 * FUNCTION : setBufferErrorStatus
13168 *
13169 * DESCRIPTION: Callback handler for channels to report any buffer errors
13170 *
13171 * PARAMETERS :
13172 * @ch : Channel on which buffer error is reported from
13173 * @frame_number : frame number on which buffer error is reported on
13174 * @buffer_status : buffer error status
13175 * @userdata: userdata
13176 *
13177 * RETURN : NONE
13178 *==========================================================================*/
13179void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13180 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13181{
13182 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13183 if (hw == NULL) {
13184 LOGE("Invalid hw %p", hw);
13185 return;
13186 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013187
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013188 hw->setBufferErrorStatus(ch, frame_number, err);
13189 return;
13190}
13191
13192void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13193 uint32_t frameNumber, camera3_buffer_status_t err)
13194{
13195 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13196 pthread_mutex_lock(&mMutex);
13197
13198 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13199 if (req.frame_number != frameNumber)
13200 continue;
13201 for (auto& k : req.mPendingBufferList) {
13202 if(k.stream->priv == ch) {
13203 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13204 }
13205 }
13206 }
13207
13208 pthread_mutex_unlock(&mMutex);
13209 return;
13210}
Thierry Strudel3d639192016-09-09 11:52:26 -070013211/*===========================================================================
13212 * FUNCTION : initialize
13213 *
13214 * DESCRIPTION: Pass framework callback pointers to HAL
13215 *
13216 * PARAMETERS :
13217 *
13218 *
13219 * RETURN : Success : 0
13220 * Failure: -ENODEV
13221 *==========================================================================*/
13222
13223int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13224 const camera3_callback_ops_t *callback_ops)
13225{
13226 LOGD("E");
13227 QCamera3HardwareInterface *hw =
13228 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13229 if (!hw) {
13230 LOGE("NULL camera device");
13231 return -ENODEV;
13232 }
13233
13234 int rc = hw->initialize(callback_ops);
13235 LOGD("X");
13236 return rc;
13237}
13238
13239/*===========================================================================
13240 * FUNCTION : configure_streams
13241 *
13242 * DESCRIPTION: Configure camera streams for the stream list passed by the framework
13243 *
13244 * PARAMETERS :
13245 *
13246 *
13247 * RETURN : Success: 0
13248 * Failure: -EINVAL (if stream configuration is invalid)
13249 * -ENODEV (fatal error)
13250 *==========================================================================*/
13251
13252int QCamera3HardwareInterface::configure_streams(
13253 const struct camera3_device *device,
13254 camera3_stream_configuration_t *stream_list)
13255{
13256 LOGD("E");
13257 QCamera3HardwareInterface *hw =
13258 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13259 if (!hw) {
13260 LOGE("NULL camera device");
13261 return -ENODEV;
13262 }
13263 int rc = hw->configureStreams(stream_list);
13264 LOGD("X");
13265 return rc;
13266}
13267
13268/*===========================================================================
13269 * FUNCTION : construct_default_request_settings
13270 *
13271 * DESCRIPTION: Configure a settings buffer to meet the required use case
13272 *
13273 * PARAMETERS :
13274 *
13275 *
13276 * RETURN : Success: Return valid metadata
13277 * Failure: Return NULL
13278 *==========================================================================*/
13279const camera_metadata_t* QCamera3HardwareInterface::
13280 construct_default_request_settings(const struct camera3_device *device,
13281 int type)
13282{
13283
13284 LOGD("E");
13285 camera_metadata_t* fwk_metadata = NULL;
13286 QCamera3HardwareInterface *hw =
13287 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13288 if (!hw) {
13289 LOGE("NULL camera device");
13290 return NULL;
13291 }
13292
13293 fwk_metadata = hw->translateCapabilityToMetadata(type);
13294
13295 LOGD("X");
13296 return fwk_metadata;
13297}
13298
13299/*===========================================================================
13300 * FUNCTION : process_capture_request
13301 *
13302 * DESCRIPTION: Process a capture request submitted by the framework
13303 *
13304 * PARAMETERS :
13305 *
13306 *
13307 * RETURN :
13308 *==========================================================================*/
13309int QCamera3HardwareInterface::process_capture_request(
13310 const struct camera3_device *device,
13311 camera3_capture_request_t *request)
13312{
13313 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013314 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013315 QCamera3HardwareInterface *hw =
13316 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13317 if (!hw) {
13318 LOGE("NULL camera device");
13319 return -EINVAL;
13320 }
13321
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013322 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013323 LOGD("X");
13324 return rc;
13325}
13326
13327/*===========================================================================
13328 * FUNCTION : dump
13329 *
13330 * DESCRIPTION: Dump HAL state and debug information to the given file descriptor
13331 *
13332 * PARAMETERS :
13333 *
13334 *
13335 * RETURN :
13336 *==========================================================================*/
13337
13338void QCamera3HardwareInterface::dump(
13339 const struct camera3_device *device, int fd)
13340{
13341 /* Log level property is read when "adb shell dumpsys media.camera" is
13342 called so that the log level can be controlled without restarting
13343 the media server */
13344 getLogLevel();
13345
13346 LOGD("E");
13347 QCamera3HardwareInterface *hw =
13348 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13349 if (!hw) {
13350 LOGE("NULL camera device");
13351 return;
13352 }
13353
13354 hw->dump(fd);
13355 LOGD("X");
13356 return;
13357}
13358
13359/*===========================================================================
13360 * FUNCTION : flush
13361 *
13362 * DESCRIPTION: Flush all in-flight captures and return the device to an idle state
13363 *
13364 * PARAMETERS :
13365 *
13366 *
13367 * RETURN :
13368 *==========================================================================*/
13369
13370int QCamera3HardwareInterface::flush(
13371 const struct camera3_device *device)
13372{
13373 int rc;
13374 LOGD("E");
13375 QCamera3HardwareInterface *hw =
13376 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13377 if (!hw) {
13378 LOGE("NULL camera device");
13379 return -EINVAL;
13380 }
13381
13382 pthread_mutex_lock(&hw->mMutex);
13383 // Validate current state
13384 switch (hw->mState) {
13385 case STARTED:
13386 /* valid state */
13387 break;
13388
13389 case ERROR:
13390 pthread_mutex_unlock(&hw->mMutex);
13391 hw->handleCameraDeviceError();
13392 return -ENODEV;
13393
13394 default:
13395 LOGI("Flush returned during state %d", hw->mState);
13396 pthread_mutex_unlock(&hw->mMutex);
13397 return 0;
13398 }
13399 pthread_mutex_unlock(&hw->mMutex);
13400
13401 rc = hw->flush(true /* restart channels */ );
13402 LOGD("X");
13403 return rc;
13404}
13405
13406/*===========================================================================
13407 * FUNCTION : close_camera_device
13408 *
13409 * DESCRIPTION: Close the camera device and free the HAL instance
13410 *
13411 * PARAMETERS :
13412 *
13413 *
13414 * RETURN :
13415 *==========================================================================*/
13416int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13417{
13418 int ret = NO_ERROR;
13419 QCamera3HardwareInterface *hw =
13420 reinterpret_cast<QCamera3HardwareInterface *>(
13421 reinterpret_cast<camera3_device_t *>(device)->priv);
13422 if (!hw) {
13423 LOGE("NULL camera device");
13424 return BAD_VALUE;
13425 }
13426
13427 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13428 delete hw;
13429 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013430 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013431 return ret;
13432}
13433
13434/*===========================================================================
13435 * FUNCTION : getWaveletDenoiseProcessPlate
13436 *
13437 * DESCRIPTION: query wavelet denoise process plate
13438 *
13439 * PARAMETERS : None
13440 *
13441 * RETURN : WNR process plate value
13442 *==========================================================================*/
13443cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13444{
13445 char prop[PROPERTY_VALUE_MAX];
13446 memset(prop, 0, sizeof(prop));
13447 property_get("persist.denoise.process.plates", prop, "0");
13448 int processPlate = atoi(prop);
13449 switch(processPlate) {
13450 case 0:
13451 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13452 case 1:
13453 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13454 case 2:
13455 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13456 case 3:
13457 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13458 default:
13459 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13460 }
13461}
13462
13463
13464/*===========================================================================
13465 * FUNCTION : getTemporalDenoiseProcessPlate
13466 *
13467 * DESCRIPTION: query temporal denoise process plate
13468 *
13469 * PARAMETERS : None
13470 *
13471 * RETURN : TNR process plate value
13472 *==========================================================================*/
13473cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13474{
13475 char prop[PROPERTY_VALUE_MAX];
13476 memset(prop, 0, sizeof(prop));
13477 property_get("persist.tnr.process.plates", prop, "0");
13478 int processPlate = atoi(prop);
13479 switch(processPlate) {
13480 case 0:
13481 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13482 case 1:
13483 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13484 case 2:
13485 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13486 case 3:
13487 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13488 default:
13489 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13490 }
13491}
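/* Illustrative usage (not part of the build): the WNR/TNR process plates read
 * above can be overridden at runtime, e.g.:
 *   adb shell setprop persist.denoise.process.plates 2   // WNR -> STREAMLINE_YCBCR
 *   adb shell setprop persist.tnr.process.plates 1       // TNR -> CBCR_ONLY
 * Unrecognized values fall back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR. */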
13492
13493
13494/*===========================================================================
13495 * FUNCTION : extractSceneMode
13496 *
13497 * DESCRIPTION: Extract scene mode from frameworks set metadata
13498 *
13499 * PARAMETERS :
13500 * @frame_settings: CameraMetadata reference
13501 * @metaMode: ANDROID_CONTROL_MODE
13502 * @hal_metadata: hal metadata structure
13503 *
13504 * RETURN : NO_ERROR on success, error code on failure
13505 *==========================================================================*/
13506int32_t QCamera3HardwareInterface::extractSceneMode(
13507 const CameraMetadata &frame_settings, uint8_t metaMode,
13508 metadata_buffer_t *hal_metadata)
13509{
13510 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013511 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13512
13513 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13514 LOGD("Ignoring control mode OFF_KEEP_STATE");
13515 return NO_ERROR;
13516 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013517
13518 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13519 camera_metadata_ro_entry entry =
13520 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13521 if (0 == entry.count)
13522 return rc;
13523
13524 uint8_t fwk_sceneMode = entry.data.u8[0];
13525
13526 int val = lookupHalName(SCENE_MODES_MAP,
13527 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13528 fwk_sceneMode);
13529 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013530 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013531 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013532 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013533 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013534
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013535 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13536 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13537 }
13538
13539 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13540 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013541 cam_hdr_param_t hdr_params;
13542 hdr_params.hdr_enable = 1;
13543 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13544 hdr_params.hdr_need_1x = false;
13545 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13546 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13547 rc = BAD_VALUE;
13548 }
13549 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013550
Thierry Strudel3d639192016-09-09 11:52:26 -070013551 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13552 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13553 rc = BAD_VALUE;
13554 }
13555 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013556
13557 if (mForceHdrSnapshot) {
13558 cam_hdr_param_t hdr_params;
13559 hdr_params.hdr_enable = 1;
13560 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13561 hdr_params.hdr_need_1x = false;
13562 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13563 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13564 rc = BAD_VALUE;
13565 }
13566 }
13567
Thierry Strudel3d639192016-09-09 11:52:26 -070013568 return rc;
13569}
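/* Flow summary (descriptive): with ANDROID_CONTROL_MODE_USE_SCENE_MODE and
 * ANDROID_CONTROL_SCENE_MODE_HDR, the scene mode is mapped through
 * SCENE_MODES_MAP, sensor HDR is evaluated first via setSensorHDR(), and if
 * sensor HDR remains disabled the HAL falls back to multi-frame HDR
 * bracketing (CAM_INTF_PARM_HAL_BRACKETING_HDR). */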
13570
13571/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013572 * FUNCTION : setVideoHdrMode
13573 *
13574 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13575 *
13576 * PARAMETERS :
13577 * @hal_metadata: hal metadata structure
13578 * @vhdr : Video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13579 *
13580 * RETURN : NO_ERROR on success, BAD_VALUE on invalid mode
13581 *==========================================================================*/
13582int32_t QCamera3HardwareInterface::setVideoHdrMode(
13583 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13584{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013585 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13586 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13587 }
13588
13589 LOGE("Invalid Video HDR mode %d!", vhdr);
13590 return BAD_VALUE;
13591}
13592
13593/*===========================================================================
13594 * FUNCTION : setSensorHDR
13595 *
13596 * DESCRIPTION: Enable/disable sensor HDR.
13597 *
13598 * PARAMETERS :
13599 * @hal_metadata: hal metadata structure
13600 * @enable: boolean whether to enable/disable sensor HDR
13601 *
13602 * RETURN : NO_ERROR on success, error code on failure
13603 *==========================================================================*/
13604int32_t QCamera3HardwareInterface::setSensorHDR(
13605 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13606{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013607 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013608 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13609
13610 if (enable) {
13611 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13612 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13613 #ifdef _LE_CAMERA_
13614 //Default to staggered HDR for IOT
13615 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13616 #else
13617 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13618 #endif
13619 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13620 }
13621
13622 bool isSupported = false;
13623 switch (sensor_hdr) {
13624 case CAM_SENSOR_HDR_IN_SENSOR:
13625 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13626 CAM_QCOM_FEATURE_SENSOR_HDR) {
13627 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013628 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013629 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013630 break;
13631 case CAM_SENSOR_HDR_ZIGZAG:
13632 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13633 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13634 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013635 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013636 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013637 break;
13638 case CAM_SENSOR_HDR_STAGGERED:
13639 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13640 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13641 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013642 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013643 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013644 break;
13645 case CAM_SENSOR_HDR_OFF:
13646 isSupported = true;
13647 LOGD("Turning off sensor HDR");
13648 break;
13649 default:
13650 LOGE("HDR mode %d not supported", sensor_hdr);
13651 rc = BAD_VALUE;
13652 break;
13653 }
13654
13655 if(isSupported) {
13656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13657 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13658 rc = BAD_VALUE;
13659 } else {
13660 if(!isVideoHdrEnable)
13661 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013662 }
13663 }
13664 return rc;
13665}
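/* Illustrative usage (not part of the build): the sensor HDR type used when
 * enabling is chosen by the property read above, e.g.:
 *   adb shell setprop persist.camera.sensor.hdr 3   // staggered HDR, per the IOT default above
 * The selected type is applied only if the matching CAM_QCOM_FEATURE_* bit is
 * advertised in qcom_supported_feature_mask. */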
13666
13667/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013668 * FUNCTION : needRotationReprocess
13669 *
13670 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13671 *
13672 * PARAMETERS : none
13673 *
13674 * RETURN : true: needed
13675 * false: no need
13676 *==========================================================================*/
13677bool QCamera3HardwareInterface::needRotationReprocess()
13678{
13679 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13680 // current rotation is not zero, and pp has the capability to process rotation
13681 LOGH("need do reprocess for rotation");
13682 return true;
13683 }
13684
13685 return false;
13686}
13687
13688/*===========================================================================
13689 * FUNCTION : needReprocess
13690 *
13691 * DESCRIPTION: if reprocess is needed
13692 *
13693 * PARAMETERS : none
13694 *
13695 * RETURN : true: needed
13696 * false: no need
13697 *==========================================================================*/
13698bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13699{
13700 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13701 // TODO: add for ZSL HDR later
13702 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13703 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13704 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13705 return true;
13706 } else {
13707 LOGH("already post processed frame");
13708 return false;
13709 }
13710 }
13711 return needRotationReprocess();
13712}
13713
13714/*===========================================================================
13715 * FUNCTION : needJpegExifRotation
13716 *
13717 * DESCRIPTION: if rotation from jpeg is needed
13718 *
13719 * PARAMETERS : none
13720 *
13721 * RETURN : true: needed
13722 * false: no need
13723 *==========================================================================*/
13724bool QCamera3HardwareInterface::needJpegExifRotation()
13725{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013726 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013727 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13728 LOGD("Need use Jpeg EXIF Rotation");
13729 return true;
13730 }
13731 return false;
13732}
13733
13734/*===========================================================================
13735 * FUNCTION : addOfflineReprocChannel
13736 *
13737 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13738 * coming from input channel
13739 *
13740 * PARAMETERS :
13741 * @config : reprocess configuration
13742 * @inputChHandle : pointer to the input (source) channel
13743 *
13744 *
13745 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13746 *==========================================================================*/
13747QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13748 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13749{
13750 int32_t rc = NO_ERROR;
13751 QCamera3ReprocessChannel *pChannel = NULL;
13752
13753 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013754 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13755 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013756 if (NULL == pChannel) {
13757 LOGE("no mem for reprocess channel");
13758 return NULL;
13759 }
13760
13761 rc = pChannel->initialize(IS_TYPE_NONE);
13762 if (rc != NO_ERROR) {
13763 LOGE("init reprocess channel failed, ret = %d", rc);
13764 delete pChannel;
13765 return NULL;
13766 }
13767
13768 // pp feature config
13769 cam_pp_feature_config_t pp_config;
13770 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13771
13772 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13773 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13774 & CAM_QCOM_FEATURE_DSDN) {
13775 //Use CPP CDS (DSDN) in case h/w supports it.
13776 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13777 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13778 }
13779 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13780 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13781 }
13782
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013783 if (config.hdr_param.hdr_enable) {
13784 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13785 pp_config.hdr_param = config.hdr_param;
13786 }
13787
13788 if (mForceHdrSnapshot) {
13789 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13790 pp_config.hdr_param.hdr_enable = 1;
13791 pp_config.hdr_param.hdr_need_1x = 0;
13792 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13793 }
13794
Thierry Strudel3d639192016-09-09 11:52:26 -070013795 rc = pChannel->addReprocStreamsFromSource(pp_config,
13796 config,
13797 IS_TYPE_NONE,
13798 mMetadataChannel);
13799
13800 if (rc != NO_ERROR) {
13801 delete pChannel;
13802 return NULL;
13803 }
13804 return pChannel;
13805}
13806
13807/*===========================================================================
13808 * FUNCTION : getMobicatMask
13809 *
13810 * DESCRIPTION: returns mobicat mask
13811 *
13812 * PARAMETERS : none
13813 *
13814 * RETURN : mobicat mask
13815 *
13816 *==========================================================================*/
13817uint8_t QCamera3HardwareInterface::getMobicatMask()
13818{
13819 return m_MobicatMask;
13820}
13821
13822/*===========================================================================
13823 * FUNCTION : setMobicat
13824 *
13825 * DESCRIPTION: set Mobicat on/off.
13826 *
13827 * PARAMETERS :
13828 * @params : none
13829 *
13830 * RETURN : int32_t type of status
13831 * NO_ERROR -- success
13832 * non-zero failure code
13833 *==========================================================================*/
13834int32_t QCamera3HardwareInterface::setMobicat()
13835{
Thierry Strudel3d639192016-09-09 11:52:26 -070013836 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013837
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013838 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013839 tune_cmd_t tune_cmd;
13840 tune_cmd.type = SET_RELOAD_CHROMATIX;
13841 tune_cmd.module = MODULE_ALL;
13842 tune_cmd.value = TRUE;
13843 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13844 CAM_INTF_PARM_SET_VFE_COMMAND,
13845 tune_cmd);
13846
13847 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13848 CAM_INTF_PARM_SET_PP_COMMAND,
13849 tune_cmd);
13850 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013851
13852 return ret;
13853}
13854
13855/*===========================================================================
13856* FUNCTION : getLogLevel
13857*
13858* DESCRIPTION: Reads the log level property into a variable
13859*
13860* PARAMETERS :
13861* None
13862*
13863* RETURN :
13864* None
13865*==========================================================================*/
13866void QCamera3HardwareInterface::getLogLevel()
13867{
13868 char prop[PROPERTY_VALUE_MAX];
13869 uint32_t globalLogLevel = 0;
13870
13871 property_get("persist.camera.hal.debug", prop, "0");
13872 int val = atoi(prop);
13873 if (0 <= val) {
13874 gCamHal3LogLevel = (uint32_t)val;
13875 }
13876
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013877 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013878 gKpiDebugLevel = atoi(prop);
13879
13880 property_get("persist.camera.global.debug", prop, "0");
13881 val = atoi(prop);
13882 if (0 <= val) {
13883 globalLogLevel = (uint32_t)val;
13884 }
13885
13886 /* Highest log level among hal.logs and global.logs is selected */
13887 if (gCamHal3LogLevel < globalLogLevel)
13888 gCamHal3LogLevel = globalLogLevel;
13889
13890 return;
13891}
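/* Illustrative usage (not part of the build): HAL verbosity can be adjusted at
 * runtime via the properties read above, e.g.:
 *   adb shell setprop persist.camera.hal.debug 3
 *   adb shell setprop persist.camera.global.debug 2
 *   adb shell dumpsys media.camera   // triggers dump(), which re-reads the log level
 * The effective level is the higher of the hal.debug and global.debug values. */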
13892
13893/*===========================================================================
13894 * FUNCTION : validateStreamRotations
13895 *
13896 * DESCRIPTION: Check if the rotations requested are supported
13897 *
13898 * PARAMETERS :
13899 * @stream_list : streams to be configured
13900 *
13901 * RETURN : NO_ERROR on success
13902 * -EINVAL on failure
13903 *
13904 *==========================================================================*/
13905int QCamera3HardwareInterface::validateStreamRotations(
13906 camera3_stream_configuration_t *streamList)
13907{
13908 int rc = NO_ERROR;
13909
13910 /*
13911 * Loop through all streams requested in configuration
13912 * Check if unsupported rotations have been requested on any of them
13913 */
13914 for (size_t j = 0; j < streamList->num_streams; j++){
13915 camera3_stream_t *newStream = streamList->streams[j];
13916
Emilian Peev35ceeed2017-06-29 11:58:56 -070013917 switch(newStream->rotation) {
13918 case CAMERA3_STREAM_ROTATION_0:
13919 case CAMERA3_STREAM_ROTATION_90:
13920 case CAMERA3_STREAM_ROTATION_180:
13921 case CAMERA3_STREAM_ROTATION_270:
13922 //Expected values
13923 break;
13924 default:
13925 ALOGE("%s: Error: Unsupported rotation of %d requested for stream "
13926 "type:%d and stream format:%d", __func__,
13927 newStream->rotation, newStream->stream_type,
13928 newStream->format);
13929 return -EINVAL;
13930 }
13931
Thierry Strudel3d639192016-09-09 11:52:26 -070013932 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13933 bool isImplDef = (newStream->format ==
13934 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13935 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13936 isImplDef);
13937
13938 if (isRotated && (!isImplDef || isZsl)) {
13939 LOGE("Error: Unsupported rotation of %d requested for stream "
13940 "type:%d and stream format:%d",
13941 newStream->rotation, newStream->stream_type,
13942 newStream->format);
13943 rc = -EINVAL;
13944 break;
13945 }
13946 }
13947
13948 return rc;
13949}
13950
13951/*===========================================================================
13952* FUNCTION : getFlashInfo
13953*
13954* DESCRIPTION: Retrieve information about whether the device has a flash.
13955*
13956* PARAMETERS :
13957* @cameraId : Camera id to query
13958* @hasFlash : Boolean indicating whether there is a flash device
13959* associated with given camera
13960* @flashNode : If a flash device exists, this will be its device node.
13961*
13962* RETURN :
13963* None
13964*==========================================================================*/
13965void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13966 bool& hasFlash,
13967 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13968{
13969 cam_capability_t* camCapability = gCamCapability[cameraId];
13970 if (NULL == camCapability) {
13971 hasFlash = false;
13972 flashNode[0] = '\0';
13973 } else {
13974 hasFlash = camCapability->flash_available;
13975 strlcpy(flashNode,
13976 (char*)camCapability->flash_dev_name,
13977 QCAMERA_MAX_FILEPATH_LENGTH);
13978 }
13979}
13980
13981/*===========================================================================
13982* FUNCTION : getEepromVersionInfo
13983*
13984* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13985*
13986* PARAMETERS : None
13987*
13988* RETURN : string describing EEPROM version
13989* "\0" if no such info available
13990*==========================================================================*/
13991const char *QCamera3HardwareInterface::getEepromVersionInfo()
13992{
13993 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13994}
13995
13996/*===========================================================================
13997* FUNCTION : getLdafCalib
13998*
13999* DESCRIPTION: Retrieve Laser AF calibration data
14000*
14001* PARAMETERS : None
14002*
14003* RETURN : Two uint32_t describing laser AF calibration data
14004* NULL if none is available.
14005*==========================================================================*/
14006const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14007{
14008 if (mLdafCalibExist) {
14009 return &mLdafCalib[0];
14010 } else {
14011 return NULL;
14012 }
14013}
14014
14015/*===========================================================================
14016 * FUNCTION : dynamicUpdateMetaStreamInfo
14017 *
14018 * DESCRIPTION: This function:
14019 * (1) stops all the channels
14020 * (2) returns error on pending requests and buffers
14021 * (3) sends metastream_info in setparams
14022 * (4) starts all channels
14023 * This is useful when sensor has to be restarted to apply any
14024 * settings such as frame rate from a different sensor mode
14025 *
14026 * PARAMETERS : None
14027 *
14028 * RETURN : NO_ERROR on success
14029 * Error codes on failure
14030 *
14031 *==========================================================================*/
14032int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14033{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014034 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014035 int rc = NO_ERROR;
14036
14037 LOGD("E");
14038
14039 rc = stopAllChannels();
14040 if (rc < 0) {
14041 LOGE("stopAllChannels failed");
14042 return rc;
14043 }
14044
14045 rc = notifyErrorForPendingRequests();
14046 if (rc < 0) {
14047 LOGE("notifyErrorForPendingRequests failed");
14048 return rc;
14049 }
14050
14051 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14052 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14053 "Format:%d",
14054 mStreamConfigInfo.type[i],
14055 mStreamConfigInfo.stream_sizes[i].width,
14056 mStreamConfigInfo.stream_sizes[i].height,
14057 mStreamConfigInfo.postprocess_mask[i],
14058 mStreamConfigInfo.format[i]);
14059 }
14060
14061 /* Send meta stream info once again so that ISP can start */
14062 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14063 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14064 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14065 mParameters);
14066 if (rc < 0) {
14067 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14068 }
14069
14070 rc = startAllChannels();
14071 if (rc < 0) {
14072 LOGE("startAllChannels failed");
14073 return rc;
14074 }
14075
14076 LOGD("X");
14077 return rc;
14078}
14079
14080/*===========================================================================
14081 * FUNCTION : stopAllChannels
14082 *
14083 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14084 *
14085 * PARAMETERS : None
14086 *
14087 * RETURN : NO_ERROR on success
14088 * Error codes on failure
14089 *
14090 *==========================================================================*/
14091int32_t QCamera3HardwareInterface::stopAllChannels()
14092{
14093 int32_t rc = NO_ERROR;
14094
14095 LOGD("Stopping all channels");
14096 // Stop the Streams/Channels
14097 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14098 it != mStreamInfo.end(); it++) {
14099 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14100 if (channel) {
14101 channel->stop();
14102 }
14103 (*it)->status = INVALID;
14104 }
14105
14106 if (mSupportChannel) {
14107 mSupportChannel->stop();
14108 }
14109 if (mAnalysisChannel) {
14110 mAnalysisChannel->stop();
14111 }
14112 if (mRawDumpChannel) {
14113 mRawDumpChannel->stop();
14114 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014115 if (mHdrPlusRawSrcChannel) {
14116 mHdrPlusRawSrcChannel->stop();
14117 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014118 if (mMetadataChannel) {
14119 /* If content of mStreamInfo is not 0, there is metadata stream */
14120 mMetadataChannel->stop();
14121 }
14122
14123 LOGD("All channels stopped");
14124 return rc;
14125}
14126
14127/*===========================================================================
14128 * FUNCTION : startAllChannels
14129 *
14130 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14131 *
14132 * PARAMETERS : None
14133 *
14134 * RETURN : NO_ERROR on success
14135 * Error codes on failure
14136 *
14137 *==========================================================================*/
14138int32_t QCamera3HardwareInterface::startAllChannels()
14139{
14140 int32_t rc = NO_ERROR;
14141
14142 LOGD("Start all channels ");
14143 // Start the Streams/Channels
14144 if (mMetadataChannel) {
14145 /* If content of mStreamInfo is not 0, there is metadata stream */
14146 rc = mMetadataChannel->start();
14147 if (rc < 0) {
14148 LOGE("META channel start failed");
14149 return rc;
14150 }
14151 }
14152 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14153 it != mStreamInfo.end(); it++) {
14154 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14155 if (channel) {
14156 rc = channel->start();
14157 if (rc < 0) {
14158 LOGE("channel start failed");
14159 return rc;
14160 }
14161 }
14162 }
14163 if (mAnalysisChannel) {
14164 mAnalysisChannel->start();
14165 }
14166 if (mSupportChannel) {
14167 rc = mSupportChannel->start();
14168 if (rc < 0) {
14169 LOGE("Support channel start failed");
14170 return rc;
14171 }
14172 }
14173 if (mRawDumpChannel) {
14174 rc = mRawDumpChannel->start();
14175 if (rc < 0) {
14176 LOGE("RAW dump channel start failed");
14177 return rc;
14178 }
14179 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014180 if (mHdrPlusRawSrcChannel) {
14181 rc = mHdrPlusRawSrcChannel->start();
14182 if (rc < 0) {
14183 LOGE("HDR+ RAW channel start failed");
14184 return rc;
14185 }
14186 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014187
14188 LOGD("All channels started");
14189 return rc;
14190}
14191
14192/*===========================================================================
14193 * FUNCTION : notifyErrorForPendingRequests
14194 *
14195 * DESCRIPTION: This function sends error for all the pending requests/buffers
14196 *
14197 * PARAMETERS : None
14198 *
14199 * RETURN : Error codes
14200 * NO_ERROR on success
14201 *
14202 *==========================================================================*/
14203int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14204{
Emilian Peev7650c122017-01-19 08:24:33 -080014205 notifyErrorFoPendingDepthData(mDepthChannel);
14206
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014207 auto pendingRequest = mPendingRequestsList.begin();
14208 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014209
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014210 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14211 // buffers (for which buffers aren't sent yet).
14212 while (pendingRequest != mPendingRequestsList.end() ||
14213 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14214 if (pendingRequest == mPendingRequestsList.end() ||
14215 pendingBuffer->frame_number < pendingRequest->frame_number) {
14216 // If metadata for this frame was sent, notify about a buffer error and returns buffers
14217 // with error.
14218 for (auto &info : pendingBuffer->mPendingBufferList) {
14219 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014220 camera3_notify_msg_t notify_msg;
14221 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14222 notify_msg.type = CAMERA3_MSG_ERROR;
14223 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014224 notify_msg.message.error.error_stream = info.stream;
14225 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014226 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014227
14228 camera3_stream_buffer_t buffer = {};
14229 buffer.acquire_fence = -1;
14230 buffer.release_fence = -1;
14231 buffer.buffer = info.buffer;
14232 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14233 buffer.stream = info.stream;
14234 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014235 }
14236
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014237 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14238 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14239 pendingBuffer->frame_number > pendingRequest->frame_number) {
14240 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014241 camera3_notify_msg_t notify_msg;
14242 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14243 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014244 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14245 notify_msg.message.error.error_stream = nullptr;
14246 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014247 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014248
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014249 if (pendingRequest->input_buffer != nullptr) {
14250 camera3_capture_result result = {};
14251 result.frame_number = pendingRequest->frame_number;
14252 result.result = nullptr;
14253 result.input_buffer = pendingRequest->input_buffer;
14254 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014255 }
14256
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014257 mShutterDispatcher.clear(pendingRequest->frame_number);
14258 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14259 } else {
14260 // If both buffers and result metadata weren't sent yet, notify about a request error
14261 // and return buffers with error.
14262 for (auto &info : pendingBuffer->mPendingBufferList) {
14263 camera3_notify_msg_t notify_msg;
14264 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14265 notify_msg.type = CAMERA3_MSG_ERROR;
14266 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14267 notify_msg.message.error.error_stream = info.stream;
14268 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14269 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014270
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014271 camera3_stream_buffer_t buffer = {};
14272 buffer.acquire_fence = -1;
14273 buffer.release_fence = -1;
14274 buffer.buffer = info.buffer;
14275 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14276 buffer.stream = info.stream;
14277 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14278 }
14279
14280 if (pendingRequest->input_buffer != nullptr) {
14281 camera3_capture_result result = {};
14282 result.frame_number = pendingRequest->frame_number;
14283 result.result = nullptr;
14284 result.input_buffer = pendingRequest->input_buffer;
14285 orchestrateResult(&result);
14286 }
14287
14288 mShutterDispatcher.clear(pendingRequest->frame_number);
14289 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14290 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014291 }
14292 }
14293
14294    /* Reset pending frame drop list and requests list */
14295 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014296 mShutterDispatcher.clear();
14297 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014298 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014299 mExpectedFrameDuration = 0;
14300 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014301 LOGH("Cleared all the pending buffers ");
14302
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014303 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014304}
14305
14306bool QCamera3HardwareInterface::isOnEncoder(
14307 const cam_dimension_t max_viewfinder_size,
14308 uint32_t width, uint32_t height)
14309{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014310 return ((width > (uint32_t)max_viewfinder_size.width) ||
14311 (height > (uint32_t)max_viewfinder_size.height) ||
14312 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14313 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014314}
14315
14316/*===========================================================================
14317 * FUNCTION : setBundleInfo
14318 *
14319 * DESCRIPTION: Set bundle info for all streams that are bundled.
14320 *
14321 * PARAMETERS : None
14322 *
14323 * RETURN : NO_ERROR on success
14324 * Error codes on failure
14325 *==========================================================================*/
14326int32_t QCamera3HardwareInterface::setBundleInfo()
14327{
14328 int32_t rc = NO_ERROR;
14329
14330 if (mChannelHandle) {
14331 cam_bundle_config_t bundleInfo;
14332 memset(&bundleInfo, 0, sizeof(bundleInfo));
14333 rc = mCameraHandle->ops->get_bundle_info(
14334 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14335 if (rc != NO_ERROR) {
14336 LOGE("get_bundle_info failed");
14337 return rc;
14338 }
14339 if (mAnalysisChannel) {
14340 mAnalysisChannel->setBundleInfo(bundleInfo);
14341 }
14342 if (mSupportChannel) {
14343 mSupportChannel->setBundleInfo(bundleInfo);
14344 }
14345 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14346 it != mStreamInfo.end(); it++) {
14347 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14348 channel->setBundleInfo(bundleInfo);
14349 }
14350 if (mRawDumpChannel) {
14351 mRawDumpChannel->setBundleInfo(bundleInfo);
14352 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014353 if (mHdrPlusRawSrcChannel) {
14354 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14355 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014356 }
14357
14358 return rc;
14359}
14360
14361/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014362 * FUNCTION : setInstantAEC
14363 *
14364 * DESCRIPTION: Set Instant AEC related params.
14365 *
14366 * PARAMETERS :
14367 * @meta: CameraMetadata reference
14368 *
14369 * RETURN : NO_ERROR on success
14370 * Error codes on failure
14371 *==========================================================================*/
14372int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14373{
14374 int32_t rc = NO_ERROR;
14375 uint8_t val = 0;
14376 char prop[PROPERTY_VALUE_MAX];
14377
14378 // First try to configure instant AEC from framework metadata
14379 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14380 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14381 }
14382
14383    // If the framework did not set this value, try to read it from the system property.
14384 if (val == 0) {
14385 memset(prop, 0, sizeof(prop));
14386 property_get("persist.camera.instant.aec", prop, "0");
14387 val = (uint8_t)atoi(prop);
14388 }
14389
14390 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14391 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14392 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14393 mInstantAEC = val;
14394 mInstantAECSettledFrameNumber = 0;
14395 mInstantAecFrameIdxCount = 0;
14396 LOGH("instantAEC value set %d",val);
14397 if (mInstantAEC) {
14398 memset(prop, 0, sizeof(prop));
14399 property_get("persist.camera.ae.instant.bound", prop, "10");
14400 int32_t aec_frame_skip_cnt = atoi(prop);
14401 if (aec_frame_skip_cnt >= 0) {
14402 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14403 } else {
14404 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14405 rc = BAD_VALUE;
14406 }
14407 }
14408 } else {
14409 LOGE("Bad instant aec value set %d", val);
14410 rc = BAD_VALUE;
14411 }
14412 return rc;
14413}
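
// A minimal debugging sketch, assuming a target where the persist.camera.* properties are
// writable: when the framework omits QCAMERA3_INSTANT_AEC_MODE, the fallback path above reads
// the mode from a system property, so it can be forced from a shell, e.g.
//   adb shell setprop persist.camera.instant.aec 1
//   adb shell setprop persist.camera.ae.instant.bound 10
// The first value must fall in [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) as checked
// above; the second bounds how many frames are skipped for display while AEC settles.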
14414
14415/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014416 * FUNCTION : get_num_overall_buffers
14417 *
14418 * DESCRIPTION: Get the total number of pending buffers across all requests.
14419 *
14420 * PARAMETERS : None
14421 *
14422 * RETURN : Number of overall pending buffers
14423 *
14424 *==========================================================================*/
14425uint32_t PendingBuffersMap::get_num_overall_buffers()
14426{
14427 uint32_t sum_buffers = 0;
14428 for (auto &req : mPendingBuffersInRequest) {
14429 sum_buffers += req.mPendingBufferList.size();
14430 }
14431 return sum_buffers;
14432}
14433
14434/*===========================================================================
14435 * FUNCTION : removeBuf
14436 *
14437 * DESCRIPTION: Remove a matching buffer from tracker.
14438 *
14439 * PARAMETERS : @buffer: handle of the image buffer to remove from tracking
14440 *
14441 * RETURN : None
14442 *
14443 *==========================================================================*/
14444void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14445{
14446 bool buffer_found = false;
14447 for (auto req = mPendingBuffersInRequest.begin();
14448 req != mPendingBuffersInRequest.end(); req++) {
14449 for (auto k = req->mPendingBufferList.begin();
14450 k != req->mPendingBufferList.end(); k++ ) {
14451 if (k->buffer == buffer) {
14452 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14453 req->frame_number, buffer);
14454 k = req->mPendingBufferList.erase(k);
14455 if (req->mPendingBufferList.empty()) {
14456 // Remove this request from Map
14457 req = mPendingBuffersInRequest.erase(req);
14458 }
14459 buffer_found = true;
14460 break;
14461 }
14462 }
14463 if (buffer_found) {
14464 break;
14465 }
14466 }
14467 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14468 get_num_overall_buffers());
14469}
14470
14471/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014472 * FUNCTION : getBufErrStatus
14473 *
14474 * DESCRIPTION: get buffer error status
14475 *
14476 * PARAMETERS : @buffer: buffer handle
14477 *
14478 * RETURN : Error status
14479 *
14480 *==========================================================================*/
14481int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14482{
14483 for (auto& req : mPendingBuffersInRequest) {
14484 for (auto& k : req.mPendingBufferList) {
14485 if (k.buffer == buffer)
14486 return k.bufStatus;
14487 }
14488 }
14489 return CAMERA3_BUFFER_STATUS_OK;
14490}
14491
14492/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014493 * FUNCTION : setPAAFSupport
14494 *
14495 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14496 * feature mask according to stream type and filter
14497 * arrangement
14498 *
14499 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14500 * @stream_type: stream type
14501 * @filter_arrangement: filter arrangement
14502 *
14503 * RETURN : None
14504 *==========================================================================*/
14505void QCamera3HardwareInterface::setPAAFSupport(
14506 cam_feature_mask_t& feature_mask,
14507 cam_stream_type_t stream_type,
14508 cam_color_filter_arrangement_t filter_arrangement)
14509{
Thierry Strudel3d639192016-09-09 11:52:26 -070014510 switch (filter_arrangement) {
14511 case CAM_FILTER_ARRANGEMENT_RGGB:
14512 case CAM_FILTER_ARRANGEMENT_GRBG:
14513 case CAM_FILTER_ARRANGEMENT_GBRG:
14514 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014515 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14516 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014517 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014518 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14519 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014520 }
14521 break;
14522 case CAM_FILTER_ARRANGEMENT_Y:
14523 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14524 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14525 }
14526 break;
14527 default:
14528 break;
14529 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014530 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14531 feature_mask, stream_type, filter_arrangement);
14532
14533
Thierry Strudel3d639192016-09-09 11:52:26 -070014534}
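
// Illustrative sketch (hypothetical caller, not part of this file's flow): for a Bayer sensor,
// preview/analysis/video streams get PAAF enabled unless the PPEISCORE feature is already
// requested, e.g.
//   cam_feature_mask_t mask = 0;
//   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
//   // mask now contains CAM_QCOM_FEATURE_PAAF
// For a mono (CAM_FILTER_ARRANGEMENT_Y) sensor, only the analysis stream gets PAAF.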
14535
14536/*===========================================================================
14537* FUNCTION : getSensorMountAngle
14538*
14539* DESCRIPTION: Retrieve sensor mount angle
14540*
14541* PARAMETERS : None
14542*
14543* RETURN : sensor mount angle in uint32_t
14544*==========================================================================*/
14545uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14546{
14547 return gCamCapability[mCameraId]->sensor_mount_angle;
14548}
14549
14550/*===========================================================================
14551* FUNCTION : getRelatedCalibrationData
14552*
14553* DESCRIPTION: Retrieve related system calibration data
14554*
14555* PARAMETERS : None
14556*
14557* RETURN     : Pointer to related system calibration data
14558*==========================================================================*/
14559const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14560{
14561 return (const cam_related_system_calibration_data_t *)
14562 &(gCamCapability[mCameraId]->related_cam_calibration);
14563}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014564
14565/*===========================================================================
14566 * FUNCTION : is60HzZone
14567 *
14568 * DESCRIPTION: Whether the phone is in a region with 60Hz mains electricity frequency
14569 *
14570 * PARAMETERS : None
14571 *
14572 * RETURN : True if in 60Hz zone, False otherwise
14573 *==========================================================================*/
14574bool QCamera3HardwareInterface::is60HzZone()
14575{
14576 time_t t = time(NULL);
14577 struct tm lt;
14578
14579 struct tm* r = localtime_r(&t, &lt);
14580
14581 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14582 return true;
14583 else
14584 return false;
14585}
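
// Heuristic sketch: the local UTC offset stands in for the mains frequency of the region.
// Offsets strictly between -2h and +8h (roughly Europe, Africa and most of Asia) are treated
// as 50Hz; anything else, or a localtime_r() failure, defaults to 60Hz. For example, a device
// at UTC-5 has tm_gmtoff == -18000, which satisfies (-18000 <= -2*60*60), so the function
// returns true (60Hz zone).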
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014586
14587/*===========================================================================
14588 * FUNCTION : adjustBlackLevelForCFA
14589 *
14590 * DESCRIPTION: Reorder the black level pattern from RGGB order to the order
14591 *              of the sensor's Bayer CFA (Color Filter Array).
14592 *
14593 * PARAMETERS : @input: black level pattern in the order of RGGB
14594 * @output: black level pattern in the order of CFA
14595 * @color_arrangement: CFA color arrangement
14596 *
14597 * RETURN : None
14598 *==========================================================================*/
14599template<typename T>
14600void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14601 T input[BLACK_LEVEL_PATTERN_CNT],
14602 T output[BLACK_LEVEL_PATTERN_CNT],
14603 cam_color_filter_arrangement_t color_arrangement)
14604{
14605 switch (color_arrangement) {
14606 case CAM_FILTER_ARRANGEMENT_GRBG:
14607 output[0] = input[1];
14608 output[1] = input[0];
14609 output[2] = input[3];
14610 output[3] = input[2];
14611 break;
14612 case CAM_FILTER_ARRANGEMENT_GBRG:
14613 output[0] = input[2];
14614 output[1] = input[3];
14615 output[2] = input[0];
14616 output[3] = input[1];
14617 break;
14618 case CAM_FILTER_ARRANGEMENT_BGGR:
14619 output[0] = input[3];
14620 output[1] = input[2];
14621 output[2] = input[1];
14622 output[3] = input[0];
14623 break;
14624 case CAM_FILTER_ARRANGEMENT_RGGB:
14625 output[0] = input[0];
14626 output[1] = input[1];
14627 output[2] = input[2];
14628 output[3] = input[3];
14629 break;
14630 default:
14631 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14632 break;
14633 }
14634}
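
// Worked example (hypothetical values): with color_arrangement = CAM_FILTER_ARRANGEMENT_GRBG
// and input[] = {64, 65, 66, 67} in RGGB order (R, Gr, Gb, B), the mapping above yields
// output[] = {65, 64, 67, 66}, i.e. Gr, R, B, Gb, which is the GRBG readout order.
// For CAM_FILTER_ARRANGEMENT_RGGB the pattern passes through unchanged.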
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014635
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014636void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14637 CameraMetadata &resultMetadata,
14638 std::shared_ptr<metadata_buffer_t> settings)
14639{
14640 if (settings == nullptr) {
14641 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14642 return;
14643 }
14644
14645 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14646 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14647 }
14648
14649 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14650 String8 str((const char *)gps_methods);
14651 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14652 }
14653
14654 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14655 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14656 }
14657
14658 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14659 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14660 }
14661
14662 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14663 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14664 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14665 }
14666
14667 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14668 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14669 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14670 }
14671
14672 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14673 int32_t fwk_thumb_size[2];
14674 fwk_thumb_size[0] = thumb_size->width;
14675 fwk_thumb_size[1] = thumb_size->height;
14676 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14677 }
14678
14679 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14680 uint8_t fwk_intent = intent[0];
14681 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14682 }
14683}
14684
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014685bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14686 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014687 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14688 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14689 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014690 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014691 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014692 return false;
14693 }
14694
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014695 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014696 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14697 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014698 return false;
14699 }
14700
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014701 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14702 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14703 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14704 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14705 return false;
14706 }
14707
14708 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14709 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14710 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14711 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14712 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14713 return false;
14714 }
14715
14716 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14717 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14718 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14719 return false;
14720 }
14721
14722 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14723 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14724 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14725         ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14726 return false;
14727 }
14728
14729 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14730 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14731 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14732 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14733 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14734 return false;
14735 }
14736
14737 // TODO (b/32585046): support non-ZSL.
14738 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14739 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14740 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14741 return false;
14742 }
14743
14744 // TODO (b/32586081): support flash.
14745 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14746 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14747 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14748 return false;
14749 }
14750
14751 // TODO (b/36492953): support digital zoom.
14752 if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
14753 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
14754 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
14755 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
14756 gCamCapability[mCameraId]->active_array_size.width ||
14757 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
14758 gCamCapability[mCameraId]->active_array_size.height) {
14759 ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
14760 __FUNCTION__);
14761 return false;
14762 }
14763
14764 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14765 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14766 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14767 return false;
14768 }
14769
14770 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014771 if (request.num_output_buffers != 1 ||
14772 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014773 ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014774 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014775 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014776                    request.output_buffers[i].stream->width,
 14777                    request.output_buffers[i].stream->height,
 14778                    request.output_buffers[i].stream->format);
14779 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014780 return false;
14781 }
14782
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014783 return true;
14784}
14785
14786bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14787 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14788 const CameraMetadata &metadata)
14789{
14790 if (hdrPlusRequest == nullptr) return false;
14791 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14792
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014793 // Get a YUV buffer from pic channel.
14794 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14795 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14796 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14797 if (res != OK) {
14798 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14799 __FUNCTION__, strerror(-res), res);
14800 return false;
14801 }
14802
14803 pbcamera::StreamBuffer buffer;
14804 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014805 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chencec36ed2017-07-21 13:54:29 -070014806 buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014807 buffer.dataSize = yuvBuffer->frame_len;
14808
14809 pbcamera::CaptureRequest pbRequest;
14810 pbRequest.id = request.frame_number;
14811 pbRequest.outputBuffers.push_back(buffer);
14812
14813 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014814 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014815 if (res != OK) {
14816 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14817 strerror(-res), res);
14818 return false;
14819 }
14820
14821 hdrPlusRequest->yuvBuffer = yuvBuffer;
14822 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14823
14824 return true;
14825}
14826
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014827status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14828{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014829 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14830 return OK;
14831 }
14832
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014833 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014834 if (res != OK) {
14835 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14836 strerror(-res), res);
14837 return res;
14838 }
14839 gHdrPlusClientOpening = true;
14840
14841 return OK;
14842}
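
// Note on the asynchronous flow (summarizing this file, no new behavior implied): the open
// completes in one of the listener callbacks below. onOpened() stores the client, clears
// gHdrPlusClientOpening, signals gHdrPlusClientOpenCond and enables HDR+ mode; onOpenFailed()
// only clears the flag and signals the condition so finishHdrPlusClientOpeningLocked() can
// stop waiting.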
14843
Chien-Yu Chenee335912017-02-09 17:53:20 -080014844status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14845{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014846 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014847
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014848 if (mHdrPlusModeEnabled) {
14849 return OK;
14850 }
14851
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014852 // Check if gHdrPlusClient is opened or being opened.
14853 if (gHdrPlusClient == nullptr) {
14854 if (gHdrPlusClientOpening) {
14855 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14856 return OK;
14857 }
14858
14859 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014860 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014861 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14862 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014863 return res;
14864 }
14865
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014866 // When opening HDR+ client completes, HDR+ mode will be enabled.
14867 return OK;
14868
Chien-Yu Chenee335912017-02-09 17:53:20 -080014869 }
14870
14871 // Configure stream for HDR+.
14872 res = configureHdrPlusStreamsLocked();
14873 if (res != OK) {
14874 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014875 return res;
14876 }
14877
14878 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14879 res = gHdrPlusClient->setZslHdrPlusMode(true);
14880 if (res != OK) {
14881 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014882 return res;
14883 }
14884
14885 mHdrPlusModeEnabled = true;
14886 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14887
14888 return OK;
14889}
14890
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014891void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14892{
14893 if (gHdrPlusClientOpening) {
14894 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14895 }
14896}
14897
Chien-Yu Chenee335912017-02-09 17:53:20 -080014898void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14899{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014900 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014901 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014902 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14903 if (res != OK) {
14904 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14905 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014906
14907 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014908 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014909 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014910 }
14911
14912 mHdrPlusModeEnabled = false;
14913 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14914}
14915
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014916bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14917{
14918 // Check if mPictureChannel is valid.
14919 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14920 if (mPictureChannel == nullptr) {
14921 return false;
14922 }
14923
14924 return true;
14925}
14926
Chien-Yu Chenee335912017-02-09 17:53:20 -080014927status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014928{
14929 pbcamera::InputConfiguration inputConfig;
14930 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14931 status_t res = OK;
14932
14933 // Configure HDR+ client streams.
14934 // Get input config.
14935 if (mHdrPlusRawSrcChannel) {
14936 // HDR+ input buffers will be provided by HAL.
14937 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14938 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14939 if (res != OK) {
14940             LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14941 __FUNCTION__, strerror(-res), res);
14942 return res;
14943 }
14944
14945 inputConfig.isSensorInput = false;
14946 } else {
14947 // Sensor MIPI will send data to Easel.
14948 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014949 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014950 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14951 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14952 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14953 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14954 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014955 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014956 if (mSensorModeInfo.num_raw_bits != 10) {
14957 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14958 mSensorModeInfo.num_raw_bits);
14959 return BAD_VALUE;
14960 }
14961
14962 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014963 }
14964
14965 // Get output configurations.
14966 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014967 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014968
14969 // Easel may need to output YUV output buffers if mPictureChannel was created.
14970 pbcamera::StreamConfiguration yuvOutputConfig;
14971 if (mPictureChannel != nullptr) {
14972 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14973 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14974 if (res != OK) {
14975             LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14976 __FUNCTION__, strerror(-res), res);
14977
14978 return res;
14979 }
14980
14981 outputStreamConfigs.push_back(yuvOutputConfig);
14982 }
14983
14984 // TODO: consider other channels for YUV output buffers.
14985
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014986 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014987 if (res != OK) {
14988         LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14989 strerror(-res), res);
14990 return res;
14991 }
14992
14993 return OK;
14994}
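
// Configuration summary (descriptive only): the HDR+ input is either RAW10 buffers that the
// HAL itself provides through mHdrPlusRawSrcChannel (isSensorInput = false), or frames the
// sensor streams to Easel directly over MIPI (isSensorInput = true, described by the current
// sensor mode). The only output wired up today is the YUV stream backed by mPictureChannel;
// RAW16 output and other YUV consumers are still TODOs above.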
14995
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070014996void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
14997{
14998 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
14999 // Set HAL state to error.
15000 pthread_mutex_lock(&mMutex);
15001 mState = ERROR;
15002 pthread_mutex_unlock(&mMutex);
15003
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015004 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015005}
15006
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015007void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15008{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015009 if (client == nullptr) {
15010 ALOGE("%s: Opened client is null.", __FUNCTION__);
15011 return;
15012 }
15013
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015014 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015015 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15016
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015017 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015018 if (!gHdrPlusClientOpening) {
15019 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15020 return;
15021 }
15022
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015023 gHdrPlusClient = std::move(client);
15024 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015025 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015026
15027 // Set static metadata.
15028 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15029 if (res != OK) {
15030 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15031 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015032 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015033 gHdrPlusClient = nullptr;
15034 return;
15035 }
15036
15037 // Enable HDR+ mode.
15038 res = enableHdrPlusModeLocked();
15039 if (res != OK) {
15040         LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15041 }
15042}
15043
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015044void QCamera3HardwareInterface::onOpenFailed(status_t err)
15045{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015046 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015047 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015048 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015049 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015050}
15051
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015052void QCamera3HardwareInterface::onFatalError()
15053{
15054 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
15055
15056 // Set HAL state to error.
15057 pthread_mutex_lock(&mMutex);
15058 mState = ERROR;
15059 pthread_mutex_unlock(&mMutex);
15060
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015061 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015062}
15063
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015064void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15065{
15066 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15067 __LINE__, requestId, apSensorTimestampNs);
15068
15069 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15070}
15071
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015072void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015073 const camera_metadata_t &resultMetadata)
15074{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015075 if (result != nullptr) {
15076 if (result->outputBuffers.size() != 1) {
15077 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
15078 result->outputBuffers.size());
15079 return;
15080 }
15081
15082 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
15083 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
15084 result->outputBuffers[0].streamId);
15085 return;
15086 }
15087
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015088 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015089 HdrPlusPendingRequest pendingRequest;
15090 {
15091 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15092 auto req = mHdrPlusPendingRequests.find(result->requestId);
15093 pendingRequest = req->second;
15094 }
15095
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015096 // Update the result metadata with the settings of the HDR+ still capture request because
15097 // the result metadata belongs to a ZSL buffer.
15098 CameraMetadata metadata;
15099 metadata = &resultMetadata;
15100 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15101 camera_metadata_t* updatedResultMetadata = metadata.release();
15102
15103 QCamera3PicChannel *picChannel =
15104 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
15105
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015106 // Check if dumping HDR+ YUV output is enabled.
15107 char prop[PROPERTY_VALUE_MAX];
15108 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15109 bool dumpYuvOutput = atoi(prop);
15110
15111 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015112 // Dump yuv buffer to a ppm file.
15113 pbcamera::StreamConfiguration outputConfig;
15114 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
15115 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
15116 if (rc == OK) {
15117 char buf[FILENAME_MAX] = {};
15118 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15119 result->requestId, result->outputBuffers[0].streamId,
15120 outputConfig.image.width, outputConfig.image.height);
15121
15122 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
15123 } else {
15124 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
15125 __FUNCTION__, strerror(-rc), rc);
15126 }
15127 }
15128
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015129 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
15130 auto halMetadata = std::make_shared<metadata_buffer_t>();
15131 clear_metadata_buffer(halMetadata.get());
15132
15133 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
15134 // encoding.
15135 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15136 halStreamId, /*minFrameDuration*/0);
15137 if (res == OK) {
15138 // Return the buffer to pic channel for encoding.
15139 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
15140 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
15141 halMetadata);
15142 } else {
15143 // Return the buffer without encoding.
15144 // TODO: This should not happen but we may want to report an error buffer to camera
15145 // service.
15146 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
15147 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
15148 strerror(-res), res);
15149 }
15150
15151 // Send HDR+ metadata to framework.
15152 {
15153 pthread_mutex_lock(&mMutex);
15154
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015155 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15156 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015157 pthread_mutex_unlock(&mMutex);
15158 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015159
15160 // Remove the HDR+ pending request.
15161 {
15162 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15163 auto req = mHdrPlusPendingRequests.find(result->requestId);
15164 mHdrPlusPendingRequests.erase(req);
15165 }
15166 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015167}
15168
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015169void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15170{
15171 if (failedResult == nullptr) {
15172 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15173 return;
15174 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015175
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015176 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015177
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015178 // Remove the pending HDR+ request.
15179 {
15180 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15181 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15182
15183 // Return the buffer to pic channel.
15184 QCamera3PicChannel *picChannel =
15185 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15186 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15187
15188 mHdrPlusPendingRequests.erase(pendingRequest);
15189 }
15190
15191 pthread_mutex_lock(&mMutex);
15192
15193 // Find the pending buffers.
15194 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15195 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15196 if (pendingBuffers->frame_number == failedResult->requestId) {
15197 break;
15198 }
15199 pendingBuffers++;
15200 }
15201
15202 // Send out buffer errors for the pending buffers.
15203 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15204 std::vector<camera3_stream_buffer_t> streamBuffers;
15205 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15206 // Prepare a stream buffer.
15207 camera3_stream_buffer_t streamBuffer = {};
15208 streamBuffer.stream = buffer.stream;
15209 streamBuffer.buffer = buffer.buffer;
15210 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15211 streamBuffer.acquire_fence = -1;
15212 streamBuffer.release_fence = -1;
15213
15214 streamBuffers.push_back(streamBuffer);
15215
15216 // Send out error buffer event.
15217 camera3_notify_msg_t notify_msg = {};
15218 notify_msg.type = CAMERA3_MSG_ERROR;
15219 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15220 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15221 notify_msg.message.error.error_stream = buffer.stream;
15222
15223 orchestrateNotify(&notify_msg);
15224 }
15225
15226 camera3_capture_result_t result = {};
15227 result.frame_number = pendingBuffers->frame_number;
15228 result.num_output_buffers = streamBuffers.size();
15229 result.output_buffers = &streamBuffers[0];
15230
15231 // Send out result with buffer errors.
15232 orchestrateResult(&result);
15233
15234 // Remove pending buffers.
15235 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15236 }
15237
15238 // Remove pending request.
15239 auto halRequest = mPendingRequestsList.begin();
15240 while (halRequest != mPendingRequestsList.end()) {
15241 if (halRequest->frame_number == failedResult->requestId) {
15242 mPendingRequestsList.erase(halRequest);
15243 break;
15244 }
15245 halRequest++;
15246 }
15247
15248 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015249}
15250
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015251
15252ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15253 mParent(parent) {}
15254
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015255void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015256{
15257 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015258
15259 if (isReprocess) {
15260 mReprocessShutters.emplace(frameNumber, Shutter());
15261 } else {
15262 mShutters.emplace(frameNumber, Shutter());
15263 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015264}
15265
15266void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15267{
15268 std::lock_guard<std::mutex> lock(mLock);
15269
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015270 std::map<uint32_t, Shutter> *shutters = nullptr;
15271
15272 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015273 auto shutter = mShutters.find(frameNumber);
15274 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015275 shutter = mReprocessShutters.find(frameNumber);
15276 if (shutter == mReprocessShutters.end()) {
15277 // Shutter was already sent.
15278 return;
15279 }
15280 shutters = &mReprocessShutters;
15281 } else {
15282 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015283 }
15284
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015285 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015286 shutter->second.ready = true;
15287 shutter->second.timestamp = timestamp;
15288
15289     // Iterate through the shutters and send them out until the first one that's not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015290 shutter = shutters->begin();
15291 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015292 if (!shutter->second.ready) {
15293 // If this shutter is not ready, the following shutters can't be sent.
15294 break;
15295 }
15296
15297 camera3_notify_msg_t msg = {};
15298 msg.type = CAMERA3_MSG_SHUTTER;
15299 msg.message.shutter.frame_number = shutter->first;
15300 msg.message.shutter.timestamp = shutter->second.timestamp;
15301 mParent->orchestrateNotify(&msg);
15302
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015303 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015304 }
15305}
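
// In-order dispatch sketch (hypothetical frame numbers): a shutter is released only once every
// earlier expected shutter in the same map is ready, e.g.
//   expectShutter(10, false);  expectShutter(11, false);
//   markShutterReady(11, t11);   // held back, frame 10 is still pending
//   markShutterReady(10, t10);   // shutter notifications for 10 and then 11 go out in order
// Regular and reprocess shutters are tracked in separate maps, so they do not block each other.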
15306
15307void ShutterDispatcher::clear(uint32_t frameNumber)
15308{
15309 std::lock_guard<std::mutex> lock(mLock);
15310 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015311 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015312}
15313
15314void ShutterDispatcher::clear()
15315{
15316 std::lock_guard<std::mutex> lock(mLock);
15317
15318 // Log errors for stale shutters.
15319 for (auto &shutter : mShutters) {
15320 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15321 __FUNCTION__, shutter.first, shutter.second.ready,
15322 shutter.second.timestamp);
15323 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015324
15325 // Log errors for stale reprocess shutters.
15326 for (auto &shutter : mReprocessShutters) {
15327 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15328 __FUNCTION__, shutter.first, shutter.second.ready,
15329 shutter.second.timestamp);
15330 }
15331
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015332 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015333 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015334}
15335
15336OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15337 mParent(parent) {}
15338
15339status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15340{
15341 std::lock_guard<std::mutex> lock(mLock);
15342 mStreamBuffers.clear();
15343 if (!streamList) {
15344 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15345 return -EINVAL;
15346 }
15347
15348 // Create a "frame-number -> buffer" map for each stream.
15349 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15350 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15351 }
15352
15353 return OK;
15354}
15355
15356status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15357{
15358 std::lock_guard<std::mutex> lock(mLock);
15359
15360 // Find the "frame-number -> buffer" map for the stream.
15361 auto buffers = mStreamBuffers.find(stream);
15362 if (buffers == mStreamBuffers.end()) {
15363 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15364 return -EINVAL;
15365 }
15366
15367 // Create an unready buffer for this frame number.
15368 buffers->second.emplace(frameNumber, Buffer());
15369 return OK;
15370}
15371
15372void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15373 const camera3_stream_buffer_t &buffer)
15374{
15375 std::lock_guard<std::mutex> lock(mLock);
15376
15377 // Find the frame number -> buffer map for the stream.
15378 auto buffers = mStreamBuffers.find(buffer.stream);
15379 if (buffers == mStreamBuffers.end()) {
15380 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15381 return;
15382 }
15383
15384     // Find the unready buffer for this frame number and mark it ready.
15385 auto pendingBuffer = buffers->second.find(frameNumber);
15386 if (pendingBuffer == buffers->second.end()) {
15387 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15388 return;
15389 }
15390
15391 pendingBuffer->second.ready = true;
15392 pendingBuffer->second.buffer = buffer;
15393
15394     // Iterate through the buffers and send them out until the first one that's not ready yet.
15395 pendingBuffer = buffers->second.begin();
15396 while (pendingBuffer != buffers->second.end()) {
15397 if (!pendingBuffer->second.ready) {
15398 // If this buffer is not ready, the following buffers can't be sent.
15399 break;
15400 }
15401
15402 camera3_capture_result_t result = {};
15403 result.frame_number = pendingBuffer->first;
15404 result.num_output_buffers = 1;
15405 result.output_buffers = &pendingBuffer->second.buffer;
15406
15407         // Send out the result containing this buffer.
15408 mParent->orchestrateResult(&result);
15409
15410 pendingBuffer = buffers->second.erase(pendingBuffer);
15411 }
15412}
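
// Per-stream in-order dispatch sketch (hypothetical frame numbers): each configured stream has
// its own frame-number -> buffer map, and a ready buffer is only returned once all earlier
// frames on that stream have been returned, e.g.
//   expectBuffer(20, stream);  expectBuffer(21, stream);
//   markBufferReady(21, buf21);  // held back, frame 20 has not produced its buffer yet
//   markBufferReady(20, buf20);  // results for frames 20 and then 21 are sent in order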
15413
15414void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15415{
15416 std::lock_guard<std::mutex> lock(mLock);
15417
15418 // Log errors for stale buffers.
15419 for (auto &buffers : mStreamBuffers) {
15420 for (auto &buffer : buffers.second) {
15421 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15422 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15423 }
15424 buffers.second.clear();
15425 }
15426
15427 if (clearConfiguredStreams) {
15428 mStreamBuffers.clear();
15429 }
15430}
15431
Thierry Strudel3d639192016-09-09 11:52:26 -070015432}; //end namespace qcamera