/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
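// Sizing example (typical use of the HFR macros above): for a 240 fps HFR
// stream the batch size typically works out to video fps / PREVIEW_FPS_FOR_HFR
// (240 / 30 = 8) and is capped at MAX_HFR_BATCH_SIZE.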
#define REGIONS_TUPLE_COUNT    5
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
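// Example: METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to the number of
// entries in that table; the lookup tables below are typically passed to the
// enum translation helpers together with this count.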

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Some Android enums are not listed because there is no mapping for every
 * option. The order of this list also matters: when mapping from HAL to
 * Android values, the lookup traverses from lower to higher index, so for HAL
 * values that map to multiple Android values the first match found is used
 * (see the example following the table).
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
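// Example of the ordering rule above: CAM_AWB_D50 appears for D50, DAYLIGHT
// and FINE_WEATHER; a HAL-to-Android lookup therefore returns the first match,
// ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50.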

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
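// Typical call site (as used in openCamera() below):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");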

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcoded for now until mctl adds support for min_num_pp_bufs
    // TBD - To see if this hardcoding is needed. Check by printing whether mctl fills this to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the
                 * full array size, so keep the check lenient for now.
                 */
1311 }
1312 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1313 MAX_SIZES_CNT);
1314 for (size_t i = 0; i < count; i++) {
1315 if (((int32_t)rotatedWidth ==
1316 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1317 ((int32_t)rotatedHeight ==
1318 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1319 sizeFound = true;
1320 break;
1321 }
1322 }
1323 break;
1324 } /* End of switch(newStream->format) */
1325
1326 /* We error out even if a single stream has unsupported size set */
1327 if (!sizeFound) {
1328 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1329 rotatedWidth, rotatedHeight, newStream->format,
1330 gCamCapability[mCameraId]->active_array_size.width,
1331 gCamCapability[mCameraId]->active_array_size.height);
1332 rc = -EINVAL;
1333 break;
1334 }
1335 } /* End of for each stream */
1336 return rc;
1337}
1338
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001339/*===========================================================================
1340 * FUNCTION : validateUsageFlags
1341 *
1342 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1343 *
1344 * PARAMETERS :
1345 * @stream_list : streams to be configured
1346 *
1347 * RETURN :
1348 * NO_ERROR if the usage flags are supported
1349 * error code if usage flags are not supported
1350 *
1351 *==========================================================================*/
1352int QCamera3HardwareInterface::validateUsageFlags(
1353 const camera3_stream_configuration_t* streamList)
1354{
1355 for (size_t j = 0; j < streamList->num_streams; j++) {
1356 const camera3_stream_t *newStream = streamList->streams[j];
1357
1358 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1359 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1360 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1361 continue;
1362 }
1363
Jason Leec4cf5032017-05-24 18:31:41 -07001364 // Here we only care whether it's EIS3 or not
1365 char is_type_value[PROPERTY_VALUE_MAX];
1366 property_get("persist.camera.is_type", is_type_value, "4");
1367 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1368 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1369 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1370 isType = IS_TYPE_NONE;
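// For front cameras and constrained high-speed sessions the default-format
// lookups below assume IS_TYPE_NONE; the EIS3-specific format selection only
// applies to the other configurations.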
1371
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001372 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1373 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1374 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1375 bool forcePreviewUBWC = true;
1376 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1377 forcePreviewUBWC = false;
1378 }
1379 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001380 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001381 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001382 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001383 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385
1386 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1387 // So color spaces will always match.
1388
1389 // Check whether underlying formats of shared streams match.
1390 if (isVideo && isPreview && videoFormat != previewFormat) {
1391 LOGE("Combined video and preview usage flag is not supported");
1392 return -EINVAL;
1393 }
1394 if (isPreview && isZSL && previewFormat != zslFormat) {
1395 LOGE("Combined preview and zsl usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isVideo && isZSL && videoFormat != zslFormat) {
1399 LOGE("Combined video and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 }
1403 return NO_ERROR;
1404}
1405
1406/*===========================================================================
1407 * FUNCTION : validateUsageFlagsForEis
1408 *
1409 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1410 *
1411 * PARAMETERS :
1412 * @stream_list : streams to be configured
1413 *
1414 * RETURN :
1415 * NO_ERROR if the usage flags are supported
1416 * error code if usage flags are not supported
1417 *
1418 *==========================================================================*/
1419int QCamera3HardwareInterface::validateUsageFlagsForEis(
1420 const camera3_stream_configuration_t* streamList)
1421{
1422 for (size_t j = 0; j < streamList->num_streams; j++) {
1423 const camera3_stream_t *newStream = streamList->streams[j];
1424
1425 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1426 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1427
1428 // Because EIS is "hard-coded" for certain use cases, and the current
1429 // implementation doesn't support shared preview and video on the same
1430 // stream, return failure if EIS is forced on.
1431 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1432 LOGE("Combined video and preview usage flag is not supported due to EIS");
1433 return -EINVAL;
1434 }
1435 }
1436 return NO_ERROR;
1437}
1438
Thierry Strudel3d639192016-09-09 11:52:26 -07001439/*==============================================================================
1440 * FUNCTION : isSupportChannelNeeded
1441 *
1442 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1443 *
1444 * PARAMETERS :
1445 * @stream_list : streams to be configured
1446 * @stream_config_info : the config info for streams to be configured
1447 *
1448 * RETURN : Boolean true/false decision
1449 *
1450 *==========================================================================*/
1451bool QCamera3HardwareInterface::isSupportChannelNeeded(
1452 camera3_stream_configuration_t *streamList,
1453 cam_stream_size_info_t stream_config_info)
1454{
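// A "support" channel here refers to an internal HAL-owned stream, presumably added
// so that the ISP/PProc pipeline has at least one processed stream to operate on.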
1455 uint32_t i;
1456 bool pprocRequested = false;
1457 /* Check for conditions where the PProc pipeline does not have any streams */
1458 for (i = 0; i < stream_config_info.num_streams; i++) {
1459 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1460 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1461 pprocRequested = true;
1462 break;
1463 }
1464 }
1465
1466 if (pprocRequested == false )
1467 return true;
1468
1469 /* Dummy stream needed if only raw or jpeg streams present */
1470 for (i = 0; i < streamList->num_streams; i++) {
1471 switch(streamList->streams[i]->format) {
1472 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1473 case HAL_PIXEL_FORMAT_RAW10:
1474 case HAL_PIXEL_FORMAT_RAW16:
1475 case HAL_PIXEL_FORMAT_BLOB:
1476 break;
1477 default:
1478 return false;
1479 }
1480 }
1481 return true;
1482}
1483
1484/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001486 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001487 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001488 *
1489 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001490 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001491 *
1492 * RETURN : int32_t type of status
1493 * NO_ERROR -- success
1494 * non-zero failure code
1495 *
1496 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001497int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001498{
1499 int32_t rc = NO_ERROR;
1500
1501 cam_dimension_t max_dim = {0, 0};
1502 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1503 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1504 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1505 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1506 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1507 }
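// max_dim is the per-axis maximum over all configured streams. It is pushed via
// CAM_INTF_PARM_MAX_DIMENSION before CAM_INTF_PARM_SENSOR_MODE_INFO is queried back,
// presumably so the backend selects a sensor mode that covers every stream.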
1508
1509 clear_metadata_buffer(mParameters);
1510
1511 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1512 max_dim);
1513 if (rc != NO_ERROR) {
1514 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1515 return rc;
1516 }
1517
1518 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1519 if (rc != NO_ERROR) {
1520 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1521 return rc;
1522 }
1523
1524 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001525 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001526
1527 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1528 mParameters);
1529 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001530 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001531 return rc;
1532 }
1533
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001535 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1536 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1537 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1538 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1539 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001540
1541 return rc;
1542}
1543
1544/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001545 * FUNCTION : getCurrentSensorModeInfo
1546 *
1547 * DESCRIPTION: Get sensor mode information that is currently selected.
1548 *
1549 * PARAMETERS :
1550 * @sensorModeInfo : sensor mode information (output)
1551 *
1552 * RETURN : int32_t type of status
1553 * NO_ERROR -- success
1554 * non-zero failure code
1555 *
1556 *==========================================================================*/
1557int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1558{
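// Unlike getSensorModeInfo(), this does not push a max-dimension hint first;
// it simply reads back the sensor mode the backend has already selected.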
1559 int32_t rc = NO_ERROR;
1560
1561 clear_metadata_buffer(mParameters);
1562 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1563
1564 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1565 mParameters);
1566 if (rc != NO_ERROR) {
1567 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1568 return rc;
1569 }
1570
1571 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1572 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1573 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1574 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1575 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1576 sensorModeInfo.num_raw_bits);
1577
1578 return rc;
1579}
1580
1581/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001582 * FUNCTION : addToPPFeatureMask
1583 *
1584 * DESCRIPTION: add additional features to pp feature mask based on
1585 * stream type and usecase
1586 *
1587 * PARAMETERS :
1588 * @stream_format : stream type for feature mask
1589 * @stream_idx : stream idx within postprocess_mask list to change
1590 *
1591 * RETURN : None
1592 *
1593 *==========================================================================*/
1594void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1595 uint32_t stream_idx)
1596{
1597 char feature_mask_value[PROPERTY_VALUE_MAX];
1598 cam_feature_mask_t feature_mask;
1599 int args_converted;
1600 int property_len;
1601
1602 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001603#ifdef _LE_CAMERA_
1604 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1605 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1606 property_len = property_get("persist.camera.hal3.feature",
1607 feature_mask_value, swtnr_feature_mask_value);
1608#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001609 property_len = property_get("persist.camera.hal3.feature",
1610 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001611#endif
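// The property value may be given either as hex with a "0x" prefix or as decimal,
// e.g. (hypothetical values) "0x100" and "256" request the same feature bit;
// a value that fails to parse is rejected below and no extra features are added.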
Thierry Strudel3d639192016-09-09 11:52:26 -07001612 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1613 (feature_mask_value[1] == 'x')) {
1614 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1615 } else {
1616 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1617 }
1618 if (1 != args_converted) {
1619 feature_mask = 0;
1620 LOGE("Wrong feature mask %s", feature_mask_value);
1621 return;
1622 }
1623
1624 switch (stream_format) {
1625 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1626 /* Add LLVD to pp feature mask only if video hint is enabled */
1627 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1628 mStreamConfigInfo.postprocess_mask[stream_idx]
1629 |= CAM_QTI_FEATURE_SW_TNR;
1630 LOGH("Added SW TNR to pp feature mask");
1631 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QCOM_FEATURE_LLVD;
1634 LOGH("Added LLVD SeeMore to pp feature mask");
1635 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001636 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1637 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1638 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1639 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001640 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1643 CAM_QTI_FEATURE_BINNING_CORRECTION;
1644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001645 break;
1646 }
1647 default:
1648 break;
1649 }
1650 LOGD("PP feature mask %llx",
1651 mStreamConfigInfo.postprocess_mask[stream_idx]);
1652}
1653
1654/*==============================================================================
1655 * FUNCTION : updateFpsInPreviewBuffer
1656 *
1657 * DESCRIPTION: update FPS information in preview buffer.
1658 *
1659 * PARAMETERS :
1660 * @metadata : pointer to metadata buffer
1661 * @frame_number: frame_number to look for in pending buffer list
1662 *
1663 * RETURN : None
1664 *
1665 *==========================================================================*/
1666void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1667 uint32_t frame_number)
1668{
1669 // Mark all pending buffers for this particular request
1670 // with corresponding framerate information
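// (The max fps from CAM_INTF_PARM_FPS_RANGE is written into the gralloc buffer's
// private handle via UPDATE_REFRESH_RATE, presumably so the display pipeline can
// match its refresh rate to the camera frame rate.)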
1671 for (List<PendingBuffersInRequest>::iterator req =
1672 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1673 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1674 for(List<PendingBufferInfo>::iterator j =
1675 req->mPendingBufferList.begin();
1676 j != req->mPendingBufferList.end(); j++) {
1677 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1678 if ((req->frame_number == frame_number) &&
1679 (channel->getStreamTypeMask() &
1680 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1681 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1682 CAM_INTF_PARM_FPS_RANGE, metadata) {
1683 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1684 struct private_handle_t *priv_handle =
1685 (struct private_handle_t *)(*(j->buffer));
1686 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1687 }
1688 }
1689 }
1690 }
1691}
1692
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001693/*==============================================================================
1694 * FUNCTION : updateTimeStampInPendingBuffers
1695 *
1696 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1697 * of a frame number
1698 *
1699 * PARAMETERS :
1700 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1701 * @timestamp : timestamp to be set
1702 *
1703 * RETURN : None
1704 *
1705 *==========================================================================*/
1706void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1707 uint32_t frameNumber, nsecs_t timestamp)
1708{
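// SET_VT_TIMESTAMP stamps the gralloc private handle of every pending buffer for
// this frame number; presumably consumed by video-telephony/display clients that
// need the HAL capture timestamp.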
1709 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1710 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1711 if (req->frame_number != frameNumber)
1712 continue;
1713
1714 for (auto k = req->mPendingBufferList.begin();
1715 k != req->mPendingBufferList.end(); k++ ) {
1716 struct private_handle_t *priv_handle =
1717 (struct private_handle_t *) (*(k->buffer));
1718 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1719 }
1720 }
1721 return;
1722}
1723
Thierry Strudel3d639192016-09-09 11:52:26 -07001724/*===========================================================================
1725 * FUNCTION : configureStreams
1726 *
1727 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1728 * and output streams.
1729 *
1730 * PARAMETERS :
1731 * @stream_list : streams to be configured
1732 *
1733 * RETURN :
1734 *
1735 *==========================================================================*/
1736int QCamera3HardwareInterface::configureStreams(
1737 camera3_stream_configuration_t *streamList)
1738{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001739 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001740 int rc = 0;
1741
1742 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001743 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001744 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001745 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001746
1747 return rc;
1748}
1749
1750/*===========================================================================
1751 * FUNCTION : configureStreamsPerfLocked
1752 *
1753 * DESCRIPTION: configureStreams while perfLock is held.
1754 *
1755 * PARAMETERS :
1756 * @stream_list : streams to be configured
1757 *
1758 * RETURN : int32_t type of status
1759 * NO_ERROR -- success
1760 * non-zero failure code
1761 *==========================================================================*/
1762int QCamera3HardwareInterface::configureStreamsPerfLocked(
1763 camera3_stream_configuration_t *streamList)
1764{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001765 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001766 int rc = 0;
1767
1768 // Sanity check stream_list
1769 if (streamList == NULL) {
1770 LOGE("NULL stream configuration");
1771 return BAD_VALUE;
1772 }
1773 if (streamList->streams == NULL) {
1774 LOGE("NULL stream list");
1775 return BAD_VALUE;
1776 }
1777
1778 if (streamList->num_streams < 1) {
1779 LOGE("Bad number of streams requested: %d",
1780 streamList->num_streams);
1781 return BAD_VALUE;
1782 }
1783
1784 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1785 LOGE("Maximum number of streams %d exceeded: %d",
1786 MAX_NUM_STREAMS, streamList->num_streams);
1787 return BAD_VALUE;
1788 }
1789
Jason Leec4cf5032017-05-24 18:31:41 -07001790 mOpMode = streamList->operation_mode;
1791 LOGD("mOpMode: %d", mOpMode);
1792
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001793 rc = validateUsageFlags(streamList);
1794 if (rc != NO_ERROR) {
1795 return rc;
1796 }
1797
Thierry Strudel3d639192016-09-09 11:52:26 -07001798 /* first invalidate all the steams in the mStreamList
1799 * if they appear again, they will be validated */
1800 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1801 it != mStreamInfo.end(); it++) {
1802 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1803 if (channel) {
1804 channel->stop();
1805 }
1806 (*it)->status = INVALID;
1807 }
1808
1809 if (mRawDumpChannel) {
1810 mRawDumpChannel->stop();
1811 delete mRawDumpChannel;
1812 mRawDumpChannel = NULL;
1813 }
1814
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001815 if (mHdrPlusRawSrcChannel) {
1816 mHdrPlusRawSrcChannel->stop();
1817 delete mHdrPlusRawSrcChannel;
1818 mHdrPlusRawSrcChannel = NULL;
1819 }
1820
Thierry Strudel3d639192016-09-09 11:52:26 -07001821 if (mSupportChannel)
1822 mSupportChannel->stop();
1823
1824 if (mAnalysisChannel) {
1825 mAnalysisChannel->stop();
1826 }
1827 if (mMetadataChannel) {
1828 /* If mStreamInfo is not empty, the metadata stream exists */
1829 mMetadataChannel->stop();
1830 }
1831 if (mChannelHandle) {
1832 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001833 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001834 LOGD("stopping channel %d", mChannelHandle);
1835 }
1836
1837 pthread_mutex_lock(&mMutex);
1838
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001839 mPictureChannel = NULL;
1840
Thierry Strudel3d639192016-09-09 11:52:26 -07001841 // Check state
1842 switch (mState) {
1843 case INITIALIZED:
1844 case CONFIGURED:
1845 case STARTED:
1846 /* valid state */
1847 break;
1848 default:
1849 LOGE("Invalid state %d", mState);
1850 pthread_mutex_unlock(&mMutex);
1851 return -ENODEV;
1852 }
1853
1854 /* Check whether we have video stream */
1855 m_bIs4KVideo = false;
1856 m_bIsVideo = false;
1857 m_bEisSupportedSize = false;
1858 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001859 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001861 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001862 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001863 uint32_t videoWidth = 0U;
1864 uint32_t videoHeight = 0U;
1865 size_t rawStreamCnt = 0;
1866 size_t stallStreamCnt = 0;
1867 size_t processedStreamCnt = 0;
1868 // Number of streams on ISP encoder path
1869 size_t numStreamsOnEncoder = 0;
1870 size_t numYuv888OnEncoder = 0;
1871 bool bYuv888OverrideJpeg = false;
1872 cam_dimension_t largeYuv888Size = {0, 0};
1873 cam_dimension_t maxViewfinderSize = {0, 0};
1874 bool bJpegExceeds4K = false;
1875 bool bJpegOnEncoder = false;
1876 bool bUseCommonFeatureMask = false;
1877 cam_feature_mask_t commonFeatureMask = 0;
1878 bool bSmallJpegSize = false;
1879 uint32_t width_ratio;
1880 uint32_t height_ratio;
1881 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1882 camera3_stream_t *inputStream = NULL;
1883 bool isJpeg = false;
1884 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001885 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001886 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001887
1888 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1889
1890 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001891 uint8_t eis_prop_set;
1892 uint32_t maxEisWidth = 0;
1893 uint32_t maxEisHeight = 0;
1894
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001895 // Initialize all instant AEC related variables
1896 mInstantAEC = false;
1897 mResetInstantAEC = false;
1898 mInstantAECSettledFrameNumber = 0;
1899 mAecSkipDisplayFrameBound = 0;
1900 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001901 mCurrFeatureState = 0;
1902 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001903
Thierry Strudel3d639192016-09-09 11:52:26 -07001904 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1905
1906 size_t count = IS_TYPE_MAX;
1907 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1908 for (size_t i = 0; i < count; i++) {
1909 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001910 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1911 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001912 break;
1913 }
1914 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001915
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001916 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001917 maxEisWidth = MAX_EIS_WIDTH;
1918 maxEisHeight = MAX_EIS_HEIGHT;
1919 }
1920
1921 /* EIS setprop control */
1922 char eis_prop[PROPERTY_VALUE_MAX];
1923 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001924 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 eis_prop_set = (uint8_t)atoi(eis_prop);
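// EIS ends up enabled only when all three hold: the property (default "1") is
// non-zero, the sensor advertises EIS 2.0 or 3.0 support, and this is not a
// constrained high-speed session.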
1926
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001927 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001928 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1929
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001930 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1931 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001932
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 /* stream configurations */
1934 for (size_t i = 0; i < streamList->num_streams; i++) {
1935 camera3_stream_t *newStream = streamList->streams[i];
1936 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1937 "height = %d, rotation = %d, usage = 0x%x",
1938 i, newStream->stream_type, newStream->format,
1939 newStream->width, newStream->height, newStream->rotation,
1940 newStream->usage);
1941 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1942 newStream->stream_type == CAMERA3_STREAM_INPUT){
1943 isZsl = true;
1944 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001945 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1946 IS_USAGE_PREVIEW(newStream->usage)) {
1947 isPreview = true;
1948 }
1949
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1951 inputStream = newStream;
1952 }
1953
Emilian Peev7650c122017-01-19 08:24:33 -08001954 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1955 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001956 isJpeg = true;
1957 jpegSize.width = newStream->width;
1958 jpegSize.height = newStream->height;
1959 if (newStream->width > VIDEO_4K_WIDTH ||
1960 newStream->height > VIDEO_4K_HEIGHT)
1961 bJpegExceeds4K = true;
1962 }
1963
1964 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1965 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1966 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001967 // In HAL3 we can have multiple different video streams.
1968 // The variables video width and height are used below as
1969 // dimensions of the biggest of them
1970 if (videoWidth < newStream->width ||
1971 videoHeight < newStream->height) {
1972 videoWidth = newStream->width;
1973 videoHeight = newStream->height;
1974 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001975 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1976 (VIDEO_4K_HEIGHT <= newStream->height)) {
1977 m_bIs4KVideo = true;
1978 }
1979 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1980 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001981
Thierry Strudel3d639192016-09-09 11:52:26 -07001982 }
1983 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1984 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1985 switch (newStream->format) {
1986 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001987 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1988 depthPresent = true;
1989 break;
1990 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001991 stallStreamCnt++;
1992 if (isOnEncoder(maxViewfinderSize, newStream->width,
1993 newStream->height)) {
1994 numStreamsOnEncoder++;
1995 bJpegOnEncoder = true;
1996 }
1997 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1998 newStream->width);
1999 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2000 newStream->height);
2001 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2002 "FATAL: max_downscale_factor cannot be zero and so assert");
2003 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2004 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2005 LOGH("Setting small jpeg size flag to true");
2006 bSmallJpegSize = true;
2007 }
2008 break;
2009 case HAL_PIXEL_FORMAT_RAW10:
2010 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2011 case HAL_PIXEL_FORMAT_RAW16:
2012 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002013 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2014 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2015 pdStatCount++;
2016 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002017 break;
2018 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2019 processedStreamCnt++;
2020 if (isOnEncoder(maxViewfinderSize, newStream->width,
2021 newStream->height)) {
2022 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2023 !IS_USAGE_ZSL(newStream->usage)) {
2024 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2025 }
2026 numStreamsOnEncoder++;
2027 }
2028 break;
2029 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2030 processedStreamCnt++;
2031 if (isOnEncoder(maxViewfinderSize, newStream->width,
2032 newStream->height)) {
2033 // If Yuv888 size is not greater than 4K, set feature mask
2034 // to SUPERSET so that it supports concurrent requests on
2035 // YUV and JPEG.
2036 if (newStream->width <= VIDEO_4K_WIDTH &&
2037 newStream->height <= VIDEO_4K_HEIGHT) {
2038 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2039 }
2040 numStreamsOnEncoder++;
2041 numYuv888OnEncoder++;
2042 largeYuv888Size.width = newStream->width;
2043 largeYuv888Size.height = newStream->height;
2044 }
2045 break;
2046 default:
2047 processedStreamCnt++;
2048 if (isOnEncoder(maxViewfinderSize, newStream->width,
2049 newStream->height)) {
2050 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2051 numStreamsOnEncoder++;
2052 }
2053 break;
2054 }
2055
2056 }
2057 }
2058
2059 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2060 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2061 !m_bIsVideo) {
2062 m_bEisEnable = false;
2063 }
2064
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002065 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2066 pthread_mutex_unlock(&mMutex);
2067 return -EINVAL;
2068 }
2069
Thierry Strudel54dc9782017-02-15 12:12:10 -08002070 uint8_t forceEnableTnr = 0;
2071 char tnr_prop[PROPERTY_VALUE_MAX];
2072 memset(tnr_prop, 0, sizeof(tnr_prop));
2073 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2074 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2075
Thierry Strudel3d639192016-09-09 11:52:26 -07002076 /* Logic to enable/disable TNR based on specific config size/etc.*/
2077 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2079 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002080 else if (forceEnableTnr)
2081 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002082
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002083 char videoHdrProp[PROPERTY_VALUE_MAX];
2084 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2085 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2086 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2087
2088 if (hdr_mode_prop == 1 && m_bIsVideo &&
2089 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2090 m_bVideoHdrEnabled = true;
2091 else
2092 m_bVideoHdrEnabled = false;
2093
2094
Thierry Strudel3d639192016-09-09 11:52:26 -07002095 /* Check if num_streams is sane */
2096 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2097 rawStreamCnt > MAX_RAW_STREAMS ||
2098 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2099 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2100 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2101 pthread_mutex_unlock(&mMutex);
2102 return -EINVAL;
2103 }
2104 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002105 if (isZsl && m_bIs4KVideo) {
2106 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002107 pthread_mutex_unlock(&mMutex);
2108 return -EINVAL;
2109 }
2110 /* Check if stream sizes are sane */
2111 if (numStreamsOnEncoder > 2) {
2112 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 } else if (1 < numStreamsOnEncoder){
2116 bUseCommonFeatureMask = true;
2117 LOGH("Multiple streams above max viewfinder size, common mask needed");
2118 }
2119
2120 /* Check if BLOB size is greater than 4k in 4k recording case */
2121 if (m_bIs4KVideo && bJpegExceeds4K) {
2122 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2123 pthread_mutex_unlock(&mMutex);
2124 return -EINVAL;
2125 }
2126
Emilian Peev7650c122017-01-19 08:24:33 -08002127 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2128 depthPresent) {
2129 LOGE("HAL doesn't support depth streams in HFR mode!");
2130 pthread_mutex_unlock(&mMutex);
2131 return -EINVAL;
2132 }
2133
Thierry Strudel3d639192016-09-09 11:52:26 -07002134 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2135 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2136 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2137 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2138 // configurations:
2139 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2140 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2141 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2142 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2143 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2144 __func__);
2145 pthread_mutex_unlock(&mMutex);
2146 return -EINVAL;
2147 }
2148
2149 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2150 // the YUV stream's size is greater than the JPEG size, set the common
2151 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2152 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2153 jpegSize.width, jpegSize.height) &&
2154 largeYuv888Size.width > jpegSize.width &&
2155 largeYuv888Size.height > jpegSize.height) {
2156 bYuv888OverrideJpeg = true;
2157 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2158 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2159 }
2160
2161 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2162 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2163 commonFeatureMask);
2164 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2165 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2166
2167 rc = validateStreamDimensions(streamList);
2168 if (rc == NO_ERROR) {
2169 rc = validateStreamRotations(streamList);
2170 }
2171 if (rc != NO_ERROR) {
2172 LOGE("Invalid stream configuration requested!");
2173 pthread_mutex_unlock(&mMutex);
2174 return rc;
2175 }
2176
Emilian Peev0f3c3162017-03-15 12:57:46 +00002177 if (1 < pdStatCount) {
2178 LOGE("HAL doesn't support multiple PD streams");
2179 pthread_mutex_unlock(&mMutex);
2180 return -EINVAL;
2181 }
2182
2183 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2184 (1 == pdStatCount)) {
2185 LOGE("HAL doesn't support PD streams in HFR mode!");
2186 pthread_mutex_unlock(&mMutex);
2187 return -EINVAL;
2188 }
2189
Thierry Strudel3d639192016-09-09 11:52:26 -07002190 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2191 for (size_t i = 0; i < streamList->num_streams; i++) {
2192 camera3_stream_t *newStream = streamList->streams[i];
2193 LOGH("newStream type = %d, stream format = %d "
2194 "stream size : %d x %d, stream rotation = %d",
2195 newStream->stream_type, newStream->format,
2196 newStream->width, newStream->height, newStream->rotation);
2197 //if the stream is in the mStreamInfo list, validate it
2198 bool stream_exists = false;
2199 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2200 it != mStreamInfo.end(); it++) {
2201 if ((*it)->stream == newStream) {
2202 QCamera3ProcessingChannel *channel =
2203 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2204 stream_exists = true;
2205 if (channel)
2206 delete channel;
2207 (*it)->status = VALID;
2208 (*it)->stream->priv = NULL;
2209 (*it)->channel = NULL;
2210 }
2211 }
2212 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2213 //new stream
2214 stream_info_t* stream_info;
2215 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2216 if (!stream_info) {
2217 LOGE("Could not allocate stream info");
2218 rc = -ENOMEM;
2219 pthread_mutex_unlock(&mMutex);
2220 return rc;
2221 }
2222 stream_info->stream = newStream;
2223 stream_info->status = VALID;
2224 stream_info->channel = NULL;
2225 mStreamInfo.push_back(stream_info);
2226 }
2227 /* Covers Opaque ZSL and API1 F/W ZSL */
2228 if (IS_USAGE_ZSL(newStream->usage)
2229 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2230 if (zslStream != NULL) {
2231 LOGE("Multiple input/reprocess streams requested!");
2232 pthread_mutex_unlock(&mMutex);
2233 return BAD_VALUE;
2234 }
2235 zslStream = newStream;
2236 }
2237 /* Covers YUV reprocess */
2238 if (inputStream != NULL) {
2239 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2240 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2241 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2242 && inputStream->width == newStream->width
2243 && inputStream->height == newStream->height) {
2244 if (zslStream != NULL) {
2245 /* This scenario indicates that multiple YUV streams with the same size
2246 * as the input stream have been requested. Since the zsl stream handle
2247 * is used solely to override the size of streams which share h/w
2248 * streams, we just make a guess here as to which stream is the ZSL
2249 * stream. This will be refactored once we add generic logic for
2250 * streams sharing encoder output.
2251 */
2252 LOGH("Warning, Multiple ip/reprocess streams requested!");
2253 }
2254 zslStream = newStream;
2255 }
2256 }
2257 }
2258
2259 /* If a zsl stream is set, we know that we have configured at least one input or
2260 bidirectional stream */
2261 if (NULL != zslStream) {
2262 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2263 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2264 mInputStreamInfo.format = zslStream->format;
2265 mInputStreamInfo.usage = zslStream->usage;
2266 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2267 mInputStreamInfo.dim.width,
2268 mInputStreamInfo.dim.height,
2269 mInputStreamInfo.format, mInputStreamInfo.usage);
2270 }
2271
2272 cleanAndSortStreamInfo();
2273 if (mMetadataChannel) {
2274 delete mMetadataChannel;
2275 mMetadataChannel = NULL;
2276 }
2277 if (mSupportChannel) {
2278 delete mSupportChannel;
2279 mSupportChannel = NULL;
2280 }
2281
2282 if (mAnalysisChannel) {
2283 delete mAnalysisChannel;
2284 mAnalysisChannel = NULL;
2285 }
2286
2287 if (mDummyBatchChannel) {
2288 delete mDummyBatchChannel;
2289 mDummyBatchChannel = NULL;
2290 }
2291
Emilian Peev7650c122017-01-19 08:24:33 -08002292 if (mDepthChannel) {
2293 mDepthChannel = NULL;
2294 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002295 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002296
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002297 mShutterDispatcher.clear();
2298 mOutputBufferDispatcher.clear();
2299
Thierry Strudel2896d122017-02-23 19:18:03 -08002300 char is_type_value[PROPERTY_VALUE_MAX];
2301 property_get("persist.camera.is_type", is_type_value, "4");
2302 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2303
Binhao Line406f062017-05-03 14:39:44 -07002304 char property_value[PROPERTY_VALUE_MAX];
2305 property_get("persist.camera.gzoom.at", property_value, "0");
2306 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002307 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2308 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2309 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2310 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
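// persist.camera.gzoom.at is a bit mask: bit 0 enables the GOOG_ZOOM feature on the
// video stream, bit 1 on the preview stream (e.g. "3" enables both), and in either
// case only for the back camera.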
Binhao Line406f062017-05-03 14:39:44 -07002311
2312 property_get("persist.camera.gzoom.4k", property_value, "0");
2313 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2314
Thierry Strudel3d639192016-09-09 11:52:26 -07002315 //Create metadata channel and initialize it
2316 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2317 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2318 gCamCapability[mCameraId]->color_arrangement);
2319 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2320 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002321 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 if (mMetadataChannel == NULL) {
2323 LOGE("failed to allocate metadata channel");
2324 rc = -ENOMEM;
2325 pthread_mutex_unlock(&mMutex);
2326 return rc;
2327 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002328 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002329 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2330 if (rc < 0) {
2331 LOGE("metadata channel initialization failed");
2332 delete mMetadataChannel;
2333 mMetadataChannel = NULL;
2334 pthread_mutex_unlock(&mMutex);
2335 return rc;
2336 }
2337
Thierry Strudel2896d122017-02-23 19:18:03 -08002338 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002339 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002340 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002341 // Keep track of preview/video streams indices.
2342 // There could be more than one preview streams, but only one video stream.
2343 int32_t video_stream_idx = -1;
2344 int32_t preview_stream_idx[streamList->num_streams];
2345 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002346 bool previewTnr[streamList->num_streams];
2347 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2348 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2349 // Loop through once to determine preview TNR conditions before creating channels.
2350 for (size_t i = 0; i < streamList->num_streams; i++) {
2351 camera3_stream_t *newStream = streamList->streams[i];
2352 uint32_t stream_usage = newStream->usage;
2353 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2354 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2355 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2356 video_stream_idx = (int32_t)i;
2357 else
2358 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2359 }
2360 }
2361 // By default, preview stream TNR is disabled.
2362 // Enable TNR to the preview stream if all conditions below are satisfied:
2363 // 1. preview resolution == video resolution.
2364 // 2. video stream TNR is enabled.
2365 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2366 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2367 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2368 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2369 if (m_bTnrEnabled && m_bTnrVideo &&
2370 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2371 video_stream->width == preview_stream->width &&
2372 video_stream->height == preview_stream->height) {
2373 previewTnr[preview_stream_idx[i]] = true;
2374 }
2375 }
2376
Thierry Strudel3d639192016-09-09 11:52:26 -07002377 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2378 /* Allocate channel objects for the requested streams */
2379 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002380
Thierry Strudel3d639192016-09-09 11:52:26 -07002381 camera3_stream_t *newStream = streamList->streams[i];
2382 uint32_t stream_usage = newStream->usage;
2383 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2384 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2385 struct camera_info *p_info = NULL;
2386 pthread_mutex_lock(&gCamLock);
2387 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2388 pthread_mutex_unlock(&gCamLock);
2389 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2390 || IS_USAGE_ZSL(newStream->usage)) &&
2391 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002392 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2395 if (bUseCommonFeatureMask)
2396 zsl_ppmask = commonFeatureMask;
2397 else
2398 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002400 if (numStreamsOnEncoder > 0)
2401 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2402 else
2403 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002404 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002405 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002406 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002407 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002408 LOGH("Input stream configured, reprocess config");
2409 } else {
2410 //for non zsl streams find out the format
2411 switch (newStream->format) {
2412 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2413 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2416 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2417 /* add additional features to pp feature mask */
2418 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2419 mStreamConfigInfo.num_streams);
2420
2421 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2422 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2423 CAM_STREAM_TYPE_VIDEO;
2424 if (m_bTnrEnabled && m_bTnrVideo) {
2425 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2426 CAM_QCOM_FEATURE_CPP_TNR;
2427 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2428 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2429 ~CAM_QCOM_FEATURE_CDS;
2430 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002431 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2433 CAM_QTI_FEATURE_PPEISCORE;
2434 }
Binhao Line406f062017-05-03 14:39:44 -07002435 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2436 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2437 CAM_QCOM_FEATURE_GOOG_ZOOM;
2438 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002439 } else {
2440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2441 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002442 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002443 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2444 CAM_QCOM_FEATURE_CPP_TNR;
2445 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2446 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2447 ~CAM_QCOM_FEATURE_CDS;
2448 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002449 if(!m_bSwTnrPreview) {
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QTI_FEATURE_SW_TNR;
2452 }
Binhao Line406f062017-05-03 14:39:44 -07002453 if (is_goog_zoom_preview_enabled) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QCOM_FEATURE_GOOG_ZOOM;
2456 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 padding_info.width_padding = mSurfaceStridePadding;
2458 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002459 previewSize.width = (int32_t)newStream->width;
2460 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 }
2462 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2463 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2464 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2465 newStream->height;
2466 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2467 newStream->width;
2468 }
2469 }
2470 break;
2471 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002472 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002473 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2474 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2475 if (bUseCommonFeatureMask)
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2477 commonFeatureMask;
2478 else
2479 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2480 CAM_QCOM_FEATURE_NONE;
2481 } else {
2482 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2483 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2484 }
2485 break;
2486 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002487 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002488 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2489 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2490 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2491 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2492 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002493 /* Remove rotation if it is not supported
2494 for 4K LiveVideo snapshot case (online processing) */
2495 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2496 CAM_QCOM_FEATURE_ROTATION)) {
2497 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2498 &= ~CAM_QCOM_FEATURE_ROTATION;
2499 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 } else {
2501 if (bUseCommonFeatureMask &&
2502 isOnEncoder(maxViewfinderSize, newStream->width,
2503 newStream->height)) {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2505 } else {
2506 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2507 }
2508 }
2509 if (isZsl) {
2510 if (zslStream) {
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2512 (int32_t)zslStream->width;
2513 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2514 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2516 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002517 } else {
2518 LOGE("Error, No ZSL stream identified");
2519 pthread_mutex_unlock(&mMutex);
2520 return -EINVAL;
2521 }
2522 } else if (m_bIs4KVideo) {
2523 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2524 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2525 } else if (bYuv888OverrideJpeg) {
2526 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2527 (int32_t)largeYuv888Size.width;
2528 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2529 (int32_t)largeYuv888Size.height;
2530 }
2531 break;
2532 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2533 case HAL_PIXEL_FORMAT_RAW16:
2534 case HAL_PIXEL_FORMAT_RAW10:
2535 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2536 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2537 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002538 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2539 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2540 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2541 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2542 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2543 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2544 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2545 gCamCapability[mCameraId]->dt[mPDIndex];
2546 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2547 gCamCapability[mCameraId]->vc[mPDIndex];
2548 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002549 break;
2550 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002551 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002552 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2553 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2554 break;
2555 }
2556 }
2557
2558 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2559 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2560 gCamCapability[mCameraId]->color_arrangement);
2561
2562 if (newStream->priv == NULL) {
2563 //New stream, construct channel
2564 switch (newStream->stream_type) {
2565 case CAMERA3_STREAM_INPUT:
2566 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2567 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2568 break;
2569 case CAMERA3_STREAM_BIDIRECTIONAL:
2570 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2571 GRALLOC_USAGE_HW_CAMERA_WRITE;
2572 break;
2573 case CAMERA3_STREAM_OUTPUT:
2574 /* For video encoding stream, set read/write rarely
2575 * flag so that they may be set to un-cached */
2576 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2577 newStream->usage |=
2578 (GRALLOC_USAGE_SW_READ_RARELY |
2579 GRALLOC_USAGE_SW_WRITE_RARELY |
2580 GRALLOC_USAGE_HW_CAMERA_WRITE);
2581 else if (IS_USAGE_ZSL(newStream->usage))
2582 {
2583 LOGD("ZSL usage flag skipping");
2584 }
2585 else if (newStream == zslStream
2586 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2587 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2588 } else
2589 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2590 break;
2591 default:
2592 LOGE("Invalid stream_type %d", newStream->stream_type);
2593 break;
2594 }
2595
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002596 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002597 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2598 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2599 QCamera3ProcessingChannel *channel = NULL;
2600 switch (newStream->format) {
2601 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2602 if ((newStream->usage &
2603 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2604 (streamList->operation_mode ==
2605 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2606 ) {
2607 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2608 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002609 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002610 this,
2611 newStream,
2612 (cam_stream_type_t)
2613 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2614 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2615 mMetadataChannel,
2616 0); //heap buffers are not required for HFR video channel
2617 if (channel == NULL) {
2618 LOGE("allocation of channel failed");
2619 pthread_mutex_unlock(&mMutex);
2620 return -ENOMEM;
2621 }
2622 //channel->getNumBuffers() will return 0 here so use
2623 //MAX_INFLIGHT_HFR_REQUESTS
2624 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2625 newStream->priv = channel;
2626 LOGI("num video buffers in HFR mode: %d",
2627 MAX_INFLIGHT_HFR_REQUESTS);
2628 } else {
2629 /* Copy stream contents in the HFR preview-only case to create a
2630 * dummy batch channel so that sensor streaming is in
2631 * HFR mode */
2632 if (!m_bIsVideo && (streamList->operation_mode ==
2633 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2634 mDummyBatchStream = *newStream;
2635 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002636 int bufferCount = MAX_INFLIGHT_REQUESTS;
2637 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2638 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002639 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2640 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2641 bufferCount = m_bIs4KVideo ?
2642 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2643 }
2644
Thierry Strudel2896d122017-02-23 19:18:03 -08002645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002646 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2647 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002648 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002649 this,
2650 newStream,
2651 (cam_stream_type_t)
2652 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2653 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2654 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002655 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002656 if (channel == NULL) {
2657 LOGE("allocation of channel failed");
2658 pthread_mutex_unlock(&mMutex);
2659 return -ENOMEM;
2660 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002661 /* disable UBWC for preview, though supported,
2662 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002663 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002664 (previewSize.width == (int32_t)videoWidth)&&
2665 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002666 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002668 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002669 /* When goog_zoom is linked to the preview or video stream,
2670 * disable ubwc to the linked stream */
2671 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2672 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2673 channel->setUBWCEnabled(false);
2674 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002675 newStream->max_buffers = channel->getNumBuffers();
2676 newStream->priv = channel;
2677 }
2678 break;
2679 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2680 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2681 mChannelHandle,
2682 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002683 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002684 this,
2685 newStream,
2686 (cam_stream_type_t)
2687 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2688 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2689 mMetadataChannel);
2690 if (channel == NULL) {
2691 LOGE("allocation of YUV channel failed");
2692 pthread_mutex_unlock(&mMutex);
2693 return -ENOMEM;
2694 }
2695 newStream->max_buffers = channel->getNumBuffers();
2696 newStream->priv = channel;
2697 break;
2698 }
2699 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2700 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002701 case HAL_PIXEL_FORMAT_RAW10: {
2702 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2703 (HAL_DATASPACE_DEPTH != newStream->data_space))
2704 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 mRawChannel = new QCamera3RawChannel(
2706 mCameraHandle->camera_handle, mChannelHandle,
2707 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002708 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002709 this, newStream,
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002711 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002712 if (mRawChannel == NULL) {
2713 LOGE("allocation of raw channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = mRawChannel->getNumBuffers();
2718 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2719 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002720 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002721 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002722 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2723 mDepthChannel = new QCamera3DepthChannel(
2724 mCameraHandle->camera_handle, mChannelHandle,
2725 mCameraHandle->ops, NULL, NULL, &padding_info,
2726 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2727 mMetadataChannel);
2728 if (NULL == mDepthChannel) {
2729 LOGE("Allocation of depth channel failed");
2730 pthread_mutex_unlock(&mMutex);
2731 return NO_MEMORY;
2732 }
2733 newStream->priv = mDepthChannel;
2734 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2735 } else {
2736 // Max live snapshot inflight buffer count is 1. This is to mitigate
2737 // frame drop issues for video snapshot: the more buffers that are
2738 // allocated, the more frame drops occur.
2739 mPictureChannel = new QCamera3PicChannel(
2740 mCameraHandle->camera_handle, mChannelHandle,
2741 mCameraHandle->ops, captureResultCb,
2742 setBufferErrorStatus, &padding_info, this, newStream,
2743 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2744 m_bIs4KVideo, isZsl, mMetadataChannel,
2745 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2746 if (mPictureChannel == NULL) {
2747 LOGE("allocation of channel failed");
2748 pthread_mutex_unlock(&mMutex);
2749 return -ENOMEM;
2750 }
2751 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2752 newStream->max_buffers = mPictureChannel->getNumBuffers();
2753 mPictureChannel->overrideYuvSize(
2754 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2755 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002756 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002757 break;
2758
2759 default:
2760 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002761 pthread_mutex_unlock(&mMutex);
2762 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002763 }
2764 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2765 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2766 } else {
2767 LOGE("Error, Unknown stream type");
2768 pthread_mutex_unlock(&mMutex);
2769 return -EINVAL;
2770 }
2771
2772 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002773 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002774 // Here we only care whether it's EIS3 or not
2775 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2776 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2777 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2778 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002779 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002780 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002781 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002782 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2783 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2784 }
2785 }
2786
2787 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2788 it != mStreamInfo.end(); it++) {
2789 if ((*it)->stream == newStream) {
2790 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2791 break;
2792 }
2793 }
2794 } else {
2795 // Channel already exists for this stream
2796 // Do nothing for now
2797 }
2798 padding_info = gCamCapability[mCameraId]->padding_info;
2799
Emilian Peev7650c122017-01-19 08:24:33 -08002800 /* Do not add entries for the input & depth streams in the metastream info
2801 * since there is no real stream associated with them
2802 */
Emilian Peev7650c122017-01-19 08:24:33 -08002803 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002804 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2805 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002806 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002807 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002808 }
2809
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002810 // Let buffer dispatcher know the configured streams.
2811 mOutputBufferDispatcher.configureStreams(streamList);
2812
Thierry Strudel2896d122017-02-23 19:18:03 -08002813 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2814 onlyRaw = false;
2815 }
2816
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002817 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002818 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002819 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002820 cam_analysis_info_t analysisInfo;
2821 int32_t ret = NO_ERROR;
2822 ret = mCommon.getAnalysisInfo(
2823 FALSE,
2824 analysisFeatureMask,
2825 &analysisInfo);
2826 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002827 cam_color_filter_arrangement_t analysis_color_arrangement =
2828 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2829 CAM_FILTER_ARRANGEMENT_Y :
2830 gCamCapability[mCameraId]->color_arrangement);
2831 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2832 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002833 cam_dimension_t analysisDim;
2834 analysisDim = mCommon.getMatchingDimension(previewSize,
2835 analysisInfo.analysis_recommended_res);
2836
2837 mAnalysisChannel = new QCamera3SupportChannel(
2838 mCameraHandle->camera_handle,
2839 mChannelHandle,
2840 mCameraHandle->ops,
2841 &analysisInfo.analysis_padding_info,
2842 analysisFeatureMask,
2843 CAM_STREAM_TYPE_ANALYSIS,
2844 &analysisDim,
2845 (analysisInfo.analysis_format
2846 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2847 : CAM_FORMAT_YUV_420_NV21),
2848 analysisInfo.hw_analysis_supported,
2849 gCamCapability[mCameraId]->color_arrangement,
2850 this,
2851 0); // force buffer count to 0
2852 } else {
2853 LOGW("getAnalysisInfo failed, ret = %d", ret);
2854 }
2855 if (!mAnalysisChannel) {
2856 LOGW("Analysis channel cannot be created");
2857 }
2858 }
2859
Thierry Strudel3d639192016-09-09 11:52:26 -07002860 //RAW DUMP channel
2861 if (mEnableRawDump && isRawStreamRequested == false){
2862 cam_dimension_t rawDumpSize;
2863 rawDumpSize = getMaxRawSize(mCameraId);
2864 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2865 setPAAFSupport(rawDumpFeatureMask,
2866 CAM_STREAM_TYPE_RAW,
2867 gCamCapability[mCameraId]->color_arrangement);
2868 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2869 mChannelHandle,
2870 mCameraHandle->ops,
2871 rawDumpSize,
2872 &padding_info,
2873 this, rawDumpFeatureMask);
2874 if (!mRawDumpChannel) {
2875 LOGE("Raw Dump channel cannot be created");
2876 pthread_mutex_unlock(&mMutex);
2877 return -ENOMEM;
2878 }
2879 }
2880
Thierry Strudel3d639192016-09-09 11:52:26 -07002881 if (mAnalysisChannel) {
2882 cam_analysis_info_t analysisInfo;
2883 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2884 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2885 CAM_STREAM_TYPE_ANALYSIS;
2886 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2887 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002888 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002889 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2890 &analysisInfo);
2891 if (rc != NO_ERROR) {
2892 LOGE("getAnalysisInfo failed, ret = %d", rc);
2893 pthread_mutex_unlock(&mMutex);
2894 return rc;
2895 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002896 cam_color_filter_arrangement_t analysis_color_arrangement =
2897 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2898 CAM_FILTER_ARRANGEMENT_Y :
2899 gCamCapability[mCameraId]->color_arrangement);
2900 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2901 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2902 analysis_color_arrangement);
2903
Thierry Strudel3d639192016-09-09 11:52:26 -07002904 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002905 mCommon.getMatchingDimension(previewSize,
2906 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002907 mStreamConfigInfo.num_streams++;
2908 }
2909
Thierry Strudel2896d122017-02-23 19:18:03 -08002910 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 cam_analysis_info_t supportInfo;
2912 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2913 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2914 setPAAFSupport(callbackFeatureMask,
2915 CAM_STREAM_TYPE_CALLBACK,
2916 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002917 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002918 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002919 if (ret != NO_ERROR) {
2920 /* Ignore the error for Mono camera
2921 * because the PAAF bit mask is only set
2922 * for CAM_STREAM_TYPE_ANALYSIS stream type
2923 */
2924 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2925 LOGW("getAnalysisInfo failed, ret = %d", ret);
2926 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002927 }
2928 mSupportChannel = new QCamera3SupportChannel(
2929 mCameraHandle->camera_handle,
2930 mChannelHandle,
2931 mCameraHandle->ops,
2932 &gCamCapability[mCameraId]->padding_info,
2933 callbackFeatureMask,
2934 CAM_STREAM_TYPE_CALLBACK,
2935 &QCamera3SupportChannel::kDim,
2936 CAM_FORMAT_YUV_420_NV21,
2937 supportInfo.hw_analysis_supported,
2938 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002939 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002940 if (!mSupportChannel) {
2941 LOGE("dummy channel cannot be created");
2942 pthread_mutex_unlock(&mMutex);
2943 return -ENOMEM;
2944 }
2945 }
2946
2947 if (mSupportChannel) {
2948 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2949 QCamera3SupportChannel::kDim;
2950 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2951 CAM_STREAM_TYPE_CALLBACK;
2952 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2953 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2954 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2955 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2956 gCamCapability[mCameraId]->color_arrangement);
2957 mStreamConfigInfo.num_streams++;
2958 }
2959
2960 if (mRawDumpChannel) {
2961 cam_dimension_t rawSize;
2962 rawSize = getMaxRawSize(mCameraId);
2963 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2964 rawSize;
2965 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2966 CAM_STREAM_TYPE_RAW;
2967 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2968 CAM_QCOM_FEATURE_NONE;
2969 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2970 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2971 gCamCapability[mCameraId]->color_arrangement);
2972 mStreamConfigInfo.num_streams++;
2973 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002974
2975 if (mHdrPlusRawSrcChannel) {
2976 cam_dimension_t rawSize;
2977 rawSize = getMaxRawSize(mCameraId);
2978 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2979 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2980 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2981 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2982 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2983 gCamCapability[mCameraId]->color_arrangement);
2984 mStreamConfigInfo.num_streams++;
2985 }
2986
Thierry Strudel3d639192016-09-09 11:52:26 -07002987 /* In HFR mode, if a video stream is not added, create a dummy channel so that
2988 * the ISP can run in batch mode even for the preview-only case. This channel is
2989 * never 'start'ed (no stream-on), it is only 'initialized' */
2990 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2991 !m_bIsVideo) {
2992 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2993 setPAAFSupport(dummyFeatureMask,
2994 CAM_STREAM_TYPE_VIDEO,
2995 gCamCapability[mCameraId]->color_arrangement);
2996 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2997 mChannelHandle,
2998 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002999 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003000 this,
3001 &mDummyBatchStream,
3002 CAM_STREAM_TYPE_VIDEO,
3003 dummyFeatureMask,
3004 mMetadataChannel);
3005 if (NULL == mDummyBatchChannel) {
3006 LOGE("creation of mDummyBatchChannel failed."
3007 "Preview will use non-hfr sensor mode ");
3008 }
3009 }
3010 if (mDummyBatchChannel) {
3011 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3012 mDummyBatchStream.width;
3013 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3014 mDummyBatchStream.height;
3015 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3016 CAM_STREAM_TYPE_VIDEO;
3017 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3018 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3019 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3020 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3021 gCamCapability[mCameraId]->color_arrangement);
3022 mStreamConfigInfo.num_streams++;
3023 }
3024
3025 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3026 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003027 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003028 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003029
3030 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3031 for (pendingRequestIterator i = mPendingRequestsList.begin();
3032 i != mPendingRequestsList.end();) {
3033 i = erasePendingRequest(i);
3034 }
3035 mPendingFrameDropList.clear();
3036 // Initialize/Reset the pending buffers list
3037 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3038 req.mPendingBufferList.clear();
3039 }
3040 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003041 mExpectedInflightDuration = 0;
3042 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003043
Thierry Strudel3d639192016-09-09 11:52:26 -07003044 mCurJpegMeta.clear();
3045 //Get min frame duration for this stream configuration
3046 deriveMinFrameDuration();
3047
Chien-Yu Chenee335912017-02-09 17:53:20 -08003048 mFirstPreviewIntentSeen = false;
3049
3050 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003051 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003052 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3053 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003054 disableHdrPlusModeLocked();
3055 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003056
Thierry Strudel3d639192016-09-09 11:52:26 -07003057 // Update state
3058 mState = CONFIGURED;
3059
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003060 mFirstMetadataCallback = true;
3061
Thierry Strudel3d639192016-09-09 11:52:26 -07003062 pthread_mutex_unlock(&mMutex);
3063
3064 return rc;
3065}
3066
3067/*===========================================================================
3068 * FUNCTION : validateCaptureRequest
3069 *
3070 * DESCRIPTION: validate a capture request from camera service
3071 *
3072 * PARAMETERS :
3073 * @request : request from framework to process
3074 *
3075 * RETURN :
3076 *
3077 *==========================================================================*/
3078int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003079 camera3_capture_request_t *request,
3080 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003081{
3082 ssize_t idx = 0;
3083 const camera3_stream_buffer_t *b;
3084 CameraMetadata meta;
3085
3086 /* Sanity check the request */
3087 if (request == NULL) {
3088 LOGE("NULL capture request");
3089 return BAD_VALUE;
3090 }
3091
3092 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3093 /*settings cannot be null for the first request*/
3094 return BAD_VALUE;
3095 }
3096
3097 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003098 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3099 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003100 LOGE("Request %d: No output buffers provided!",
3101 frameNumber);
3102 return BAD_VALUE;
3103 }
3104 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3105 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3106 request->num_output_buffers, MAX_NUM_STREAMS);
3107 return BAD_VALUE;
3108 }
3109 if (request->input_buffer != NULL) {
3110 b = request->input_buffer;
3111 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3112 LOGE("Request %d: Buffer %ld: Status not OK!",
3113 frameNumber, (long)idx);
3114 return BAD_VALUE;
3115 }
3116 if (b->release_fence != -1) {
3117 LOGE("Request %d: Buffer %ld: Has a release fence!",
3118 frameNumber, (long)idx);
3119 return BAD_VALUE;
3120 }
3121 if (b->buffer == NULL) {
3122 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3123 frameNumber, (long)idx);
3124 return BAD_VALUE;
3125 }
3126 }
3127
3128 // Validate all buffers
3129 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003130 if (b == NULL) {
3131 return BAD_VALUE;
3132 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003133 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003134 QCamera3ProcessingChannel *channel =
3135 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3136 if (channel == NULL) {
3137 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3142 LOGE("Request %d: Buffer %ld: Status not OK!",
3143 frameNumber, (long)idx);
3144 return BAD_VALUE;
3145 }
3146 if (b->release_fence != -1) {
3147 LOGE("Request %d: Buffer %ld: Has a release fence!",
3148 frameNumber, (long)idx);
3149 return BAD_VALUE;
3150 }
3151 if (b->buffer == NULL) {
3152 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (*(b->buffer) == NULL) {
3157 LOGE("Request %d: Buffer %ld: NULL private handle!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 idx++;
3162 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003163 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003164 return NO_ERROR;
3165}
3166
3167/*===========================================================================
3168 * FUNCTION : deriveMinFrameDuration
3169 *
3170 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3171 * on currently configured streams.
3172 *
3173 * PARAMETERS : NONE
3174 *
3175 * RETURN : NONE
3176 *
3177 *==========================================================================*/
3178void QCamera3HardwareInterface::deriveMinFrameDuration()
3179{
3180 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003181 bool hasRaw = false;
3182
3183 mMinRawFrameDuration = 0;
3184 mMinJpegFrameDuration = 0;
3185 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003186
3187 maxJpegDim = 0;
3188 maxProcessedDim = 0;
3189 maxRawDim = 0;
3190
3191 // Figure out maximum jpeg, processed, and raw dimensions
3192 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3193 it != mStreamInfo.end(); it++) {
3194
3195 // Input stream doesn't have valid stream_type
3196 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3197 continue;
3198
3199 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3200 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3201 if (dimension > maxJpegDim)
3202 maxJpegDim = dimension;
3203 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3204 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3205 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003206 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003207 if (dimension > maxRawDim)
3208 maxRawDim = dimension;
3209 } else {
3210 if (dimension > maxProcessedDim)
3211 maxProcessedDim = dimension;
3212 }
3213 }
3214
3215 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3216 MAX_SIZES_CNT);
3217
3218 //Assume all jpeg dimensions are in processed dimensions.
3219 if (maxJpegDim > maxProcessedDim)
3220 maxProcessedDim = maxJpegDim;
3221 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003222 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003223 maxRawDim = INT32_MAX;
3224
3225 for (size_t i = 0; i < count; i++) {
3226 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3227 gCamCapability[mCameraId]->raw_dim[i].height;
3228 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3229 maxRawDim = dimension;
3230 }
3231 }
3232
3233 //Find minimum durations for processed, jpeg, and raw
3234 for (size_t i = 0; i < count; i++) {
3235 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3236 gCamCapability[mCameraId]->raw_dim[i].height) {
3237 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3238 break;
3239 }
3240 }
3241 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3242 for (size_t i = 0; i < count; i++) {
3243 if (maxProcessedDim ==
3244 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3245 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3246 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3247 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3248 break;
3249 }
3250 }
3251}
3252
3253/*===========================================================================
3254 * FUNCTION : getMinFrameDuration
3255 *
3256 * DESCRIPTION: get the minimum frame duration based on the minimum frame
3257 * durations derived for the configured streams and the current request configuration.
3258 *
3259 * PARAMETERS : @request: request sent by the framework
3260 *
3261 * RETURN : minimum frame duration for a particular request
3262 *
3263 *==========================================================================*/
3264int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3265{
3266 bool hasJpegStream = false;
3267 bool hasRawStream = false;
3268 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3269 const camera3_stream_t *stream = request->output_buffers[i].stream;
3270 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3271 hasJpegStream = true;
3272 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3273 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3274 stream->format == HAL_PIXEL_FORMAT_RAW16)
3275 hasRawStream = true;
3276 }
3277
3278 if (!hasJpegStream)
3279 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3280 else
3281 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3282}
3283
3284/*===========================================================================
3285 * FUNCTION : handleBuffersDuringFlushLock
3286 *
3287 * DESCRIPTION: Account for buffers returned from back-end during flush
3288 * This function is executed while mMutex is held by the caller.
3289 *
3290 * PARAMETERS :
3291 * @buffer: image buffer for the callback
3292 *
3293 * RETURN :
3294 *==========================================================================*/
3295void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3296{
3297 bool buffer_found = false;
3298 for (List<PendingBuffersInRequest>::iterator req =
3299 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3300 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3301 for (List<PendingBufferInfo>::iterator i =
3302 req->mPendingBufferList.begin();
3303 i != req->mPendingBufferList.end(); i++) {
3304 if (i->buffer == buffer->buffer) {
3305 mPendingBuffersMap.numPendingBufsAtFlush--;
3306 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3307 buffer->buffer, req->frame_number,
3308 mPendingBuffersMap.numPendingBufsAtFlush);
3309 buffer_found = true;
3310 break;
3311 }
3312 }
3313 if (buffer_found) {
3314 break;
3315 }
3316 }
3317 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3318 //signal the flush()
3319 LOGD("All buffers returned to HAL. Continue flush");
3320 pthread_cond_signal(&mBuffersCond);
3321 }
3322}
3323
Thierry Strudel3d639192016-09-09 11:52:26 -07003324/*===========================================================================
3325 * FUNCTION : handleBatchMetadata
3326 *
3327 * DESCRIPTION: Handles metadata buffer callback in batch mode
3328 *
3329 * PARAMETERS : @metadata_buf: metadata buffer
3330 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3331 * the meta buf in this method
3332 *
3333 * RETURN :
3334 *
3335 *==========================================================================*/
3336void QCamera3HardwareInterface::handleBatchMetadata(
3337 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3338{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003339 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003340
3341 if (NULL == metadata_buf) {
3342 LOGE("metadata_buf is NULL");
3343 return;
3344 }
3345 /* In batch mode, the metadata will contain the frame number and timestamp of
3346 * the last frame in the batch. Eg: a batch containing buffers from requests
3347 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3348 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3349 * multiple process_capture_results */
3350 metadata_buffer_t *metadata =
3351 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3352 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3353 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3354 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3355 uint32_t frame_number = 0, urgent_frame_number = 0;
3356 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3357 bool invalid_metadata = false;
3358 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3359 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003360 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003361
3362 int32_t *p_frame_number_valid =
3363 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3364 uint32_t *p_frame_number =
3365 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3366 int64_t *p_capture_time =
3367 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3368 int32_t *p_urgent_frame_number_valid =
3369 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3370 uint32_t *p_urgent_frame_number =
3371 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3372
3373 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3374 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3375 (NULL == p_urgent_frame_number)) {
3376 LOGE("Invalid metadata");
3377 invalid_metadata = true;
3378 } else {
3379 frame_number_valid = *p_frame_number_valid;
3380 last_frame_number = *p_frame_number;
3381 last_frame_capture_time = *p_capture_time;
3382 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3383 last_urgent_frame_number = *p_urgent_frame_number;
3384 }
3385
3386 /* In batch mode, when no video buffers are requested, set_parms are sent
3387 * for every capture_request. The difference between consecutive urgent
3388 * frame numbers and frame numbers should be used to interpolate the
3389 * corresponding frame numbers and timestamps */
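    /* Worked example (hypothetical batch): if this metadata reports
     * last_frame_number = 8 and the batch started at first_frame_number = 5,
     * frameNumDiff becomes 4 and the loop below replays the metadata for frames
     * 5, 6, 7 and 8, each with an interpolated frame number and timestamp. */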
3390 pthread_mutex_lock(&mMutex);
3391 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003392 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3393 if(idx < 0) {
3394 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3395 last_urgent_frame_number);
3396 mState = ERROR;
3397 pthread_mutex_unlock(&mMutex);
3398 return;
3399 }
3400 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003401 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3402 first_urgent_frame_number;
3403
3404 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3405 urgent_frame_number_valid,
3406 first_urgent_frame_number, last_urgent_frame_number);
3407 }
3408
3409 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003410 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3411 if(idx < 0) {
3412 LOGE("Invalid frame number received: %d. Irrecoverable error",
3413 last_frame_number);
3414 mState = ERROR;
3415 pthread_mutex_unlock(&mMutex);
3416 return;
3417 }
3418 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003419 frameNumDiff = last_frame_number + 1 -
3420 first_frame_number;
3421 mPendingBatchMap.removeItem(last_frame_number);
3422
3423 LOGD("frm: valid: %d frm_num: %d - %d",
3424 frame_number_valid,
3425 first_frame_number, last_frame_number);
3426
3427 }
3428 pthread_mutex_unlock(&mMutex);
3429
3430 if (urgent_frame_number_valid || frame_number_valid) {
3431 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3432 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3433 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3434 urgentFrameNumDiff, last_urgent_frame_number);
3435 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3436 LOGE("frameNumDiff: %d frameNum: %d",
3437 frameNumDiff, last_frame_number);
3438 }
3439
3440 for (size_t i = 0; i < loopCount; i++) {
3441 /* handleMetadataWithLock is called even for invalid_metadata for
3442 * pipeline depth calculation */
3443 if (!invalid_metadata) {
3444 /* Infer frame number. Batch metadata contains frame number of the
3445 * last frame */
3446 if (urgent_frame_number_valid) {
3447 if (i < urgentFrameNumDiff) {
3448 urgent_frame_number =
3449 first_urgent_frame_number + i;
3450 LOGD("inferred urgent frame_number: %d",
3451 urgent_frame_number);
3452 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3453 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3454 } else {
3455 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3456 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3457 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3458 }
3459 }
3460
3461 /* Infer frame number. Batch metadata contains frame number of the
3462 * last frame */
3463 if (frame_number_valid) {
3464 if (i < frameNumDiff) {
3465 frame_number = first_frame_number + i;
3466 LOGD("inferred frame_number: %d", frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_FRAME_NUMBER, frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 if (last_frame_capture_time) {
3477 //Infer timestamp
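                // e.g. (hypothetical numbers) with loopCount = 4 and mHFRVideoFps = 120,
                // frames i = 0..3 get last_frame_capture_time - 25ms, -16.7ms, -8.3ms and
                // last_frame_capture_time itself, i.e. evenly spaced at the HFR frame interval.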
3478 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003479 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003480 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003481 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3484 LOGD("batch capture_time: %lld, capture_time: %lld",
3485 last_frame_capture_time, capture_time);
3486 }
3487 }
3488 pthread_mutex_lock(&mMutex);
3489 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003490 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003491 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3492 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003493 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003494 pthread_mutex_unlock(&mMutex);
3495 }
3496
3497 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003498 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003499 mMetadataChannel->bufDone(metadata_buf);
3500 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003501 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003502 }
3503}
3504
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3506 camera3_error_msg_code_t errorCode)
3507{
3508 camera3_notify_msg_t notify_msg;
3509 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3510 notify_msg.type = CAMERA3_MSG_ERROR;
3511 notify_msg.message.error.error_code = errorCode;
3512 notify_msg.message.error.error_stream = NULL;
3513 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003514 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003515
3516 return;
3517}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003518
3519/*===========================================================================
3520 * FUNCTION : sendPartialMetadataWithLock
3521 *
3522 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3523 *
3524 * PARAMETERS : @metadata: metadata buffer
3525 * @requestIter: The iterator for the pending capture request for
3526 * which the partial result is being sent
3527 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3528 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003529 * @isJumpstartMetadata: Whether this is a partial metadata for
3530 * jumpstart, i.e. even though it doesn't map to a valid partial
3531 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003532 *
3533 * RETURN :
3534 *
3535 *==========================================================================*/
3536
3537void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3538 metadata_buffer_t *metadata,
3539 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003540 bool lastUrgentMetadataInBatch,
3541 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003542{
3543 camera3_capture_result_t result;
3544 memset(&result, 0, sizeof(camera3_capture_result_t));
3545
3546 requestIter->partial_result_cnt++;
3547
3548 // Extract 3A metadata
3549 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003550 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3551 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003552 // Populate metadata result
3553 result.frame_number = requestIter->frame_number;
3554 result.num_output_buffers = 0;
3555 result.output_buffers = NULL;
3556 result.partial_result = requestIter->partial_result_cnt;
3557
3558 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003559 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003560 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3561 // Notify HDR+ client about the partial metadata.
3562 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3563 result.partial_result == PARTIAL_RESULT_COUNT);
3564 }
3565 }
3566
3567 orchestrateResult(&result);
3568 LOGD("urgent frame_number = %u", result.frame_number);
3569 free_camera_metadata((camera_metadata_t *)result.result);
3570}
3571
Thierry Strudel3d639192016-09-09 11:52:26 -07003572/*===========================================================================
3573 * FUNCTION : handleMetadataWithLock
3574 *
3575 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3576 *
3577 * PARAMETERS : @metadata_buf: metadata buffer
3578 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3579 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003580 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3581 * last urgent metadata in a batch. Always true for non-batch mode
3582 * @lastMetadataInBatch: Boolean to indicate whether this is the
3583 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003584 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3585 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003586 *
3587 * RETURN :
3588 *
3589 *==========================================================================*/
3590void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003591 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003592 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3593 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003594{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003595 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003596 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3597 //during flush do not send metadata from this thread
3598 LOGD("not sending metadata during flush or when mState is error");
3599 if (free_and_bufdone_meta_buf) {
3600 mMetadataChannel->bufDone(metadata_buf);
3601 free(metadata_buf);
3602 }
3603 return;
3604 }
3605
3606 //not in flush
3607 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3608 int32_t frame_number_valid, urgent_frame_number_valid;
3609 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003610 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 nsecs_t currentSysTime;
3612
3613 int32_t *p_frame_number_valid =
3614 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3615 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3616 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003617 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003618 int32_t *p_urgent_frame_number_valid =
3619 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3620 uint32_t *p_urgent_frame_number =
3621 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3622 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3623 metadata) {
3624 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3625 *p_frame_number_valid, *p_frame_number);
3626 }
3627
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003628 camera_metadata_t *resultMetadata = nullptr;
3629
Thierry Strudel3d639192016-09-09 11:52:26 -07003630 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3631 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3632 LOGE("Invalid metadata");
3633 if (free_and_bufdone_meta_buf) {
3634 mMetadataChannel->bufDone(metadata_buf);
3635 free(metadata_buf);
3636 }
3637 goto done_metadata;
3638 }
3639 frame_number_valid = *p_frame_number_valid;
3640 frame_number = *p_frame_number;
3641 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003642 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003643 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3644 urgent_frame_number = *p_urgent_frame_number;
3645 currentSysTime = systemTime(CLOCK_MONOTONIC);
3646
Jason Lee603176d2017-05-31 11:43:27 -07003647 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3648 const int tries = 3;
3649 nsecs_t bestGap, measured;
3650 for (int i = 0; i < tries; ++i) {
3651 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3652 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3653 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3654 const nsecs_t gap = tmono2 - tmono;
3655 if (i == 0 || gap < bestGap) {
3656 bestGap = gap;
3657 measured = tbase - ((tmono + tmono2) >> 1);
3658 }
3659 }
3660 capture_time -= measured;
3661 }
3662
Thierry Strudel3d639192016-09-09 11:52:26 -07003663 // Detect if buffers from any requests are overdue
3664 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003665 int64_t timeout;
3666 {
3667 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3668 // If there is a pending HDR+ request, the following requests may be blocked until the
3669 // HDR+ request is done. So allow a longer timeout.
3670 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3671 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003672 if (timeout < mExpectedInflightDuration) {
3673 timeout = mExpectedInflightDuration;
3674 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003675 }
3676
3677 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003679 assert(missed.stream->priv);
3680 if (missed.stream->priv) {
3681 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3682 assert(ch->mStreams[0]);
3683 if (ch->mStreams[0]) {
3684 LOGE("Cancel missing frame = %d, buffer = %p,"
3685 "stream type = %d, stream format = %d",
3686 req.frame_number, missed.buffer,
3687 ch->mStreams[0]->getMyType(), missed.stream->format);
3688 ch->timeoutFrame(req.frame_number);
3689 }
3690 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003691 }
3692 }
3693 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003694 //For the very first metadata callback, regardless of whether it contains a valid
3695 //frame number, send the partial metadata for the jumpstarting requests.
3696 //Note that this has to be done even if the metadata doesn't contain valid
3697 //urgent frame number, because in the case where only 1 request is ever submitted
3698 //to the HAL, there won't be a subsequent valid urgent frame number.
3699 if (mFirstMetadataCallback) {
3700 for (pendingRequestIterator i =
3701 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3702 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003703 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3704 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003705 }
3706 }
3707 mFirstMetadataCallback = false;
3708 }
3709
Thierry Strudel3d639192016-09-09 11:52:26 -07003710 //Partial result on process_capture_result for timestamp
3711 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003712 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003713
3714 //Received an urgent frame number, handle it
3715 //using partial results
3716 for (pendingRequestIterator i =
3717 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3718 LOGD("Iterator Frame = %d urgent frame = %d",
3719 i->frame_number, urgent_frame_number);
3720
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003721 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003722 (i->partial_result_cnt == 0)) {
3723 LOGE("Error: HAL missed urgent metadata for frame number %d",
3724 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003725 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003726 }
3727
3728 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003729 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003730 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3731 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003732 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3733 // Instant AEC settled for this frame.
3734 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3735 mInstantAECSettledFrameNumber = urgent_frame_number;
3736 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003737 break;
3738 }
3739 }
3740 }
3741
3742 if (!frame_number_valid) {
3743 LOGD("Not a valid normal frame number, used as SOF only");
3744 if (free_and_bufdone_meta_buf) {
3745 mMetadataChannel->bufDone(metadata_buf);
3746 free(metadata_buf);
3747 }
3748 goto done_metadata;
3749 }
3750 LOGH("valid frame_number = %u, capture_time = %lld",
3751 frame_number, capture_time);
3752
Emilian Peev4e0fe952017-06-30 12:40:09 -07003753 handleDepthDataLocked(metadata->depth_data, frame_number,
3754 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003755
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003756 // Check whether any stream buffer corresponding to this frame was dropped or not.
3757 // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3758 // OR, if instant AEC is enabled, frames need to be dropped until AEC has settled.
3759 for (auto & pendingRequest : mPendingRequestsList) {
3760 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3761 mInstantAECSettledFrameNumber)) {
3762 camera3_notify_msg_t notify_msg = {};
3763 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003765 QCamera3ProcessingChannel *channel =
3766 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003767 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003768 if (p_cam_frame_drop) {
3769 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003770 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003771 // Got the stream ID for drop frame.
3772 dropFrame = true;
3773 break;
3774 }
3775 }
3776 } else {
3777 // This is instant AEC case.
3778 // For instant AEC, drop the stream buffers until AEC has settled.
3779 dropFrame = true;
3780 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003781
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003782 if (dropFrame) {
3783 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3784 if (p_cam_frame_drop) {
3785 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003786 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003787 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003788 } else {
3789 // For instant AEC, inform frame drop and frame number
3790 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3791 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003792 pendingRequest.frame_number, streamID,
3793 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003794 }
3795 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003797 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003798 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003799 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003800 if (p_cam_frame_drop) {
3801 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003802 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003803 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003804 } else {
3805 // For instant AEC, inform frame drop and frame number
3806 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3807 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 pendingRequest.frame_number, streamID,
3809 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003810 }
3811 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003813 PendingFrameDrop.stream_ID = streamID;
3814 // Add the Frame drop info to mPendingFrameDropList
3815 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003816 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003817 }
3818 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003820
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003821 for (auto & pendingRequest : mPendingRequestsList) {
3822 // Find the pending request with the frame number.
3823 if (pendingRequest.frame_number == frame_number) {
3824 // Update the sensor timestamp.
3825 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003826
Thierry Strudel3d639192016-09-09 11:52:26 -07003827
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003828 /* Set the timestamp in display metadata so that clients aware of
3829 private_handle, such as VT, can use these un-modified timestamps.
3830 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003831 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003832
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 // Find channel requiring metadata, meaning internal offline postprocess
3834 // is needed.
3835 //TODO: for now, we don't support two streams requiring metadata at the same time.
3836 // (because we are not making copies, and the metadata buffer is not reference counted).
3837 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003838 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3839 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003840 if (iter->need_metadata) {
3841 internalPproc = true;
3842 QCamera3ProcessingChannel *channel =
3843 (QCamera3ProcessingChannel *)iter->stream->priv;
3844 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003845 if(p_is_metabuf_queued != NULL) {
3846 *p_is_metabuf_queued = true;
3847 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003848 break;
3849 }
3850 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003851 for (auto itr = pendingRequest.internalRequestList.begin();
3852 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003853 if (itr->need_metadata) {
3854 internalPproc = true;
3855 QCamera3ProcessingChannel *channel =
3856 (QCamera3ProcessingChannel *)itr->stream->priv;
3857 channel->queueReprocMetadata(metadata_buf);
3858 break;
3859 }
3860 }
3861
Thierry Strudel54dc9782017-02-15 12:12:10 -08003862 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003863
3864 bool *enableZsl = nullptr;
3865 if (gExposeEnableZslKey) {
3866 enableZsl = &pendingRequest.enableZsl;
3867 }
3868
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003869 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003870 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003871 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003872
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003873 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003874
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003875 if (pendingRequest.blob_request) {
3876 //Dump tuning metadata if enabled and available
3877 char prop[PROPERTY_VALUE_MAX];
3878 memset(prop, 0, sizeof(prop));
3879 property_get("persist.camera.dumpmetadata", prop, "0");
3880 int32_t enabled = atoi(prop);
3881 if (enabled && metadata->is_tuning_params_valid) {
3882 dumpMetadataToFile(metadata->tuning_params,
3883 mMetaFrameCount,
3884 enabled,
3885 "Snapshot",
3886 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003887 }
3888 }
3889
3890 if (!internalPproc) {
3891 LOGD("couldn't find need_metadata for this metadata");
3892 // Return metadata buffer
3893 if (free_and_bufdone_meta_buf) {
3894 mMetadataChannel->bufDone(metadata_buf);
3895 free(metadata_buf);
3896 }
3897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003898
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003899 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003900 }
3901 }
3902
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003903 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3904
3905 // Try to send out capture result metadata.
3906 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003907 return;
3908
Thierry Strudel3d639192016-09-09 11:52:26 -07003909done_metadata:
3910 for (pendingRequestIterator i = mPendingRequestsList.begin();
3911 i != mPendingRequestsList.end() ;i++) {
3912 i->pipeline_depth++;
3913 }
3914 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3915 unblockRequestIfNecessary();
3916}
3917
3918/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003919 * FUNCTION : handleDepthDataLocked
3920 *
3921 * DESCRIPTION: Handles incoming depth data
3922 *
3923 * PARAMETERS : @depthData : Depth data
3924 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003925 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003926 *
3927 * RETURN :
3928 *
3929 *==========================================================================*/
3930void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003931 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003932 uint32_t currentFrameNumber;
3933 buffer_handle_t *depthBuffer;
3934
3935 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003936 return;
3937 }
3938
3939 camera3_stream_buffer_t resultBuffer =
3940 {.acquire_fence = -1,
3941 .release_fence = -1,
3942 .status = CAMERA3_BUFFER_STATUS_OK,
3943 .buffer = nullptr,
3944 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003945 do {
3946 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3947 if (nullptr == depthBuffer) {
3948 break;
3949 }
3950
Emilian Peev7650c122017-01-19 08:24:33 -08003951 resultBuffer.buffer = depthBuffer;
3952 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003953 if (valid) {
3954 int32_t rc = mDepthChannel->populateDepthData(depthData,
3955 frameNumber);
3956 if (NO_ERROR != rc) {
3957 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3958 } else {
3959 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3960 }
Emilian Peev7650c122017-01-19 08:24:33 -08003961 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003962 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003963 }
3964 } else if (currentFrameNumber > frameNumber) {
3965 break;
3966 } else {
3967 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3968 {{currentFrameNumber, mDepthChannel->getStream(),
3969 CAMERA3_MSG_ERROR_BUFFER}}};
3970 orchestrateNotify(&notify_msg);
3971
3972 LOGE("Depth buffer for frame number: %d is missing, "
3973 "returning it with an error!", currentFrameNumber);
3974 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3975 }
3976 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003977 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003978 } while (currentFrameNumber < frameNumber);
3979}
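// Illustrative walk-through of the draining loop above (the frame numbers are
// assumptions for the example only): if depth buffers for frames 10, 11 and 12
// are pending and depth data arrives for frame 12, frames 10 and 11 are each
// returned with CAMERA3_BUFFER_STATUS_ERROR plus a CAMERA3_MSG_ERROR_BUFFER
// notify, frame 12 is filled via populateDepthData() (or marked as an error if
// the incoming data is invalid or population fails), and the loop stops once
// the oldest pending frame is no longer older than the incoming frame number.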
3980
3981/*===========================================================================
3982 * FUNCTION : notifyErrorFoPendingDepthData
3983 *
3984 * DESCRIPTION: Returns error for any pending depth buffers
3985 *
3986 * PARAMETERS : depthCh - depth channel that needs to get flushed
3987 *
3988 * RETURN :
3989 *
3990 *==========================================================================*/
3991void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3992 QCamera3DepthChannel *depthCh) {
3993 uint32_t currentFrameNumber;
3994 buffer_handle_t *depthBuffer;
3995
3996 if (nullptr == depthCh) {
3997 return;
3998 }
3999
4000 camera3_notify_msg_t notify_msg =
4001 {.type = CAMERA3_MSG_ERROR,
4002 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4003 camera3_stream_buffer_t resultBuffer =
4004 {.acquire_fence = -1,
4005 .release_fence = -1,
4006 .buffer = nullptr,
4007 .stream = depthCh->getStream(),
4008 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004009
4010 while (nullptr !=
4011 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4012 depthCh->unmapBuffer(currentFrameNumber);
4013
4014 notify_msg.message.error.frame_number = currentFrameNumber;
4015 orchestrateNotify(&notify_msg);
4016
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004017 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004018 };
4019}
4020
4021/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004022 * FUNCTION : hdrPlusPerfLock
4023 *
4024 * DESCRIPTION: perf lock for HDR+ using custom intent
4025 *
4026 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4027 *
4028 * RETURN : None
4029 *
4030 *==========================================================================*/
4031void QCamera3HardwareInterface::hdrPlusPerfLock(
4032 mm_camera_super_buf_t *metadata_buf)
4033{
4034 if (NULL == metadata_buf) {
4035 LOGE("metadata_buf is NULL");
4036 return;
4037 }
4038 metadata_buffer_t *metadata =
4039 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4040 int32_t *p_frame_number_valid =
4041 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4042 uint32_t *p_frame_number =
4043 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4044
4045 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4046 LOGE("%s: Invalid metadata", __func__);
4047 return;
4048 }
4049
Wei Wang01385482017-08-03 10:49:34 -07004050 //acquire perf lock for 2 secs after the last HDR frame is captured
4051 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004052 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4053 if ((p_frame_number != NULL) &&
4054 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004055 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004056 }
4057 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004058}
4059
4060/*===========================================================================
4061 * FUNCTION : handleInputBufferWithLock
4062 *
4063 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4064 *
4065 * PARAMETERS : @frame_number: frame number of the input buffer
4066 *
4067 * RETURN :
4068 *
4069 *==========================================================================*/
4070void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4071{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004072 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004073 pendingRequestIterator i = mPendingRequestsList.begin();
4074 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4075 i++;
4076 }
4077 if (i != mPendingRequestsList.end() && i->input_buffer) {
4078 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004079 CameraMetadata settings;
4080 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4081 if(i->settings) {
4082 settings = i->settings;
4083 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4084 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004085 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004086 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004087 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004088 } else {
4089 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004090 }
4091
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004092 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4093 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4094 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004095
4096 camera3_capture_result result;
4097 memset(&result, 0, sizeof(camera3_capture_result));
4098 result.frame_number = frame_number;
4099 result.result = i->settings;
4100 result.input_buffer = i->input_buffer;
4101 result.partial_result = PARTIAL_RESULT_COUNT;
4102
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004103 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004104 LOGD("Input request metadata and input buffer frame_number = %u",
4105 i->frame_number);
4106 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004107
4108 // Dispatch result metadata that may be just unblocked by this reprocess result.
4109 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004110 } else {
4111 LOGE("Could not find input request for frame number %d", frame_number);
4112 }
4113}
4114
4115/*===========================================================================
4116 * FUNCTION : handleBufferWithLock
4117 *
4118 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4119 *
4120 * PARAMETERS : @buffer: image buffer for the callback
4121 * @frame_number: frame number of the image buffer
4122 *
4123 * RETURN :
4124 *
4125 *==========================================================================*/
4126void QCamera3HardwareInterface::handleBufferWithLock(
4127 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4128{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004129 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004130
4131 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4132 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4133 }
4134
Thierry Strudel3d639192016-09-09 11:52:26 -07004135 /* Nothing to be done during error state */
4136 if ((ERROR == mState) || (DEINIT == mState)) {
4137 return;
4138 }
4139 if (mFlushPerf) {
4140 handleBuffersDuringFlushLock(buffer);
4141 return;
4142 }
4143 //not in flush
4144 // If the frame number doesn't exist in the pending request list,
4145 // directly send the buffer to the frameworks, and update pending buffers map
4146 // Otherwise, book-keep the buffer.
4147 pendingRequestIterator i = mPendingRequestsList.begin();
4148 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4149 i++;
4150 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004151
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004152 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004153 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004154 // For a reprocessing request, try to send out result metadata.
4155 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004157 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004158
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004159 // Check if this frame was dropped.
4160 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4161 m != mPendingFrameDropList.end(); m++) {
4162 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4163 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4164 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4165 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4166 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4167 frame_number, streamID);
4168 m = mPendingFrameDropList.erase(m);
4169 break;
4170 }
4171 }
4172
4173 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4174 LOGH("result frame_number = %d, buffer = %p",
4175 frame_number, buffer->buffer);
4176
4177 mPendingBuffersMap.removeBuf(buffer->buffer);
4178 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4179
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004180 if (mPreviewStarted == false) {
4181 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4182 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004183 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4184
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004185 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4186 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4187 mPreviewStarted = true;
4188
4189 // Set power hint for preview
4190 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4191 }
4192 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004193}
4194
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004195void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004196 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004197{
4198 // Find the pending request for this result metadata.
4199 auto requestIter = mPendingRequestsList.begin();
4200 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4201 requestIter++;
4202 }
4203
4204 if (requestIter == mPendingRequestsList.end()) {
4205 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4206 return;
4207 }
4208
4209 // Update the result metadata
4210 requestIter->resultMetadata = resultMetadata;
4211
4212 // Check what type of request this is.
4213 bool liveRequest = false;
4214 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004215 // HDR+ request doesn't have partial results.
4216 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004217 } else if (requestIter->input_buffer != nullptr) {
4218 // Reprocessing request result is the same as settings.
4219 requestIter->resultMetadata = requestIter->settings;
4220 // Reprocessing request doesn't have partial results.
4221 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4222 } else {
4223 liveRequest = true;
4224 requestIter->partial_result_cnt++;
4225 mPendingLiveRequest--;
4226
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004227 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004228 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004229 // For a live request, send the metadata to HDR+ client.
4230 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4231 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4232 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4233 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004234 }
4235 }
4236
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004237 // Remove lens shading map if it's not requested.
4238 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4239 CameraMetadata metadata;
4240 metadata.acquire(resultMetadata);
4241 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4242 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4243 &requestIter->requestedLensShadingMapMode, 1);
4244
4245 requestIter->resultMetadata = metadata.release();
4246 }
4247
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004248 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4249}
4250
4251void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4252 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004253 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4254 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004255 bool readyToSend = true;
4256
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004257 // Iterate through the pending requests to send out result metadata that are ready. Also if
4258 // this result metadata belongs to a live request, notify errors for previous live requests
4259 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004260 auto iter = mPendingRequestsList.begin();
4261 while (iter != mPendingRequestsList.end()) {
4262 // Check if current pending request is ready. If it's not ready, the following pending
4263 // requests are also not ready.
4264 if (readyToSend && iter->resultMetadata == nullptr) {
4265 readyToSend = false;
4266 }
4267
4268 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4269
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004270 camera3_capture_result_t result = {};
4271 result.frame_number = iter->frame_number;
4272 result.result = iter->resultMetadata;
4273 result.partial_result = iter->partial_result_cnt;
4274
4275 // If this pending buffer has result metadata, we may be able to send out shutter callback
4276 // and result metadata.
4277 if (iter->resultMetadata != nullptr) {
4278 if (!readyToSend) {
4279 // If any of the previous pending request is not ready, this pending request is
4280 // also not ready to send in order to keep shutter callbacks and result metadata
4281 // in order.
4282 iter++;
4283 continue;
4284 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004285 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004286 // If the result metadata belongs to a live request, notify errors for previous pending
4287 // live requests.
4288 mPendingLiveRequest--;
4289
4290 CameraMetadata dummyMetadata;
4291 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4292 result.result = dummyMetadata.release();
4293
4294 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004295
4296 // partial_result should be PARTIAL_RESULT_COUNT in case of
4297 // ERROR_RESULT.
4298 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4299 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004300 } else {
4301 iter++;
4302 continue;
4303 }
4304
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004305 result.output_buffers = nullptr;
4306 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004307 orchestrateResult(&result);
4308
4309 // For reprocessing, result metadata is the same as settings so do not free it here to
4310 // avoid double free.
4311 if (result.result != iter->settings) {
4312 free_camera_metadata((camera_metadata_t *)result.result);
4313 }
4314 iter->resultMetadata = nullptr;
4315 iter = erasePendingRequest(iter);
4316 }
4317
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004318 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004319 for (auto &iter : mPendingRequestsList) {
4320 // Increment pipeline depth for the following pending requests.
4321 if (iter.frame_number > frameNumber) {
4322 iter.pipeline_depth++;
4323 }
4324 }
4325 }
4326
4327 unblockRequestIfNecessary();
4328}
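// A minimal sketch of the ordering invariant enforced above (frame numbers are
// illustrative assumptions): with pending requests 5, 6 and 7, if metadata for
// 7 arrives while 6 is still missing, 7 stays queued (readyToSend == false);
// once 6 becomes ready, both are dispatched in frame-number order. For a live
// request, older live entries that still have no metadata are instead completed
// with a CAMERA3_MSG_ERROR_RESULT notification and a dummy result containing
// only the request id.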
4329
Thierry Strudel3d639192016-09-09 11:52:26 -07004330/*===========================================================================
4331 * FUNCTION : unblockRequestIfNecessary
4332 *
4333 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4334 * that mMutex is held when this function is called.
4335 *
4336 * PARAMETERS :
4337 *
4338 * RETURN :
4339 *
4340 *==========================================================================*/
4341void QCamera3HardwareInterface::unblockRequestIfNecessary()
4342{
4343 // Unblock process_capture_request
4344 pthread_cond_signal(&mRequestCond);
4345}
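// Rough sketch of the expected handshake, assuming the request thread blocks in
// processCaptureRequest() when too many requests are in flight (a sketch of the
// intent, not the verbatim wait loop):
//
//     pthread_mutex_lock(&mMutex);
//     while (mPendingLiveRequest >= mMaxInFlightRequests) {
//         pthread_cond_wait(&mRequestCond, &mMutex);   // woken by the signal above
//     }
//     pthread_mutex_unlock(&mMutex);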
4346
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004347/*===========================================================================
4348 * FUNCTION : isHdrSnapshotRequest
4349 *
4350 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4351 *
4352 * PARAMETERS : camera3 request structure
4353 *
4354 * RETURN : boolean decision variable
4355 *
4356 *==========================================================================*/
4357bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4358{
4359 if (request == NULL) {
4360 LOGE("Invalid request handle");
4361 assert(0);
4362 return false;
4363 }
4364
4365 if (!mForceHdrSnapshot) {
4366 CameraMetadata frame_settings;
4367 frame_settings = request->settings;
4368
4369 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4370 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4371 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4372 return false;
4373 }
4374 } else {
4375 return false;
4376 }
4377
4378 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4379 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4380 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4381 return false;
4382 }
4383 } else {
4384 return false;
4385 }
4386 }
4387
4388 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4389 if (request->output_buffers[i].stream->format
4390 == HAL_PIXEL_FORMAT_BLOB) {
4391 return true;
4392 }
4393 }
4394
4395 return false;
4396}
4397/*===========================================================================
4398 * FUNCTION : orchestrateRequest
4399 *
4400 * DESCRIPTION: Orchestrates a capture request from camera service
4401 *
4402 * PARAMETERS :
4403 * @request : request from framework to process
4404 *
4405 * RETURN : Error status codes
4406 *
4407 *==========================================================================*/
4408int32_t QCamera3HardwareInterface::orchestrateRequest(
4409 camera3_capture_request_t *request)
4410{
4411
4412 uint32_t originalFrameNumber = request->frame_number;
4413 uint32_t originalOutputCount = request->num_output_buffers;
4414 const camera_metadata_t *original_settings = request->settings;
4415 List<InternalRequest> internallyRequestedStreams;
4416 List<InternalRequest> emptyInternalList;
4417
4418 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4419 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4420 uint32_t internalFrameNumber;
4421 CameraMetadata modified_meta;
4422
4423
4424 /* Add Blob channel to list of internally requested streams */
4425 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4426 if (request->output_buffers[i].stream->format
4427 == HAL_PIXEL_FORMAT_BLOB) {
4428 InternalRequest streamRequested;
4429 streamRequested.meteringOnly = 1;
4430 streamRequested.need_metadata = 0;
4431 streamRequested.stream = request->output_buffers[i].stream;
4432 internallyRequestedStreams.push_back(streamRequested);
4433 }
4434 }
4435 request->num_output_buffers = 0;
4436 auto itr = internallyRequestedStreams.begin();
4437
4438 /* Modify setting to set compensation */
4439 modified_meta = request->settings;
4440 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4441 uint8_t aeLock = 1;
4442 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4443 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4444 camera_metadata_t *modified_settings = modified_meta.release();
4445 request->settings = modified_settings;
4446
4447 /* Capture Settling & -2x frame */
4448 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4449 request->frame_number = internalFrameNumber;
4450 processCaptureRequest(request, internallyRequestedStreams);
4451
4452 request->num_output_buffers = originalOutputCount;
4453 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4454 request->frame_number = internalFrameNumber;
4455 processCaptureRequest(request, emptyInternalList);
4456 request->num_output_buffers = 0;
4457
4458 modified_meta = modified_settings;
4459 expCompensation = 0;
4460 aeLock = 1;
4461 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4462 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4463 modified_settings = modified_meta.release();
4464 request->settings = modified_settings;
4465
4466 /* Capture Settling & 0X frame */
4467
4468 itr = internallyRequestedStreams.begin();
4469 if (itr == internallyRequestedStreams.end()) {
4470 LOGE("Error Internally Requested Stream list is empty");
4471 assert(0);
4472 } else {
4473 itr->need_metadata = 0;
4474 itr->meteringOnly = 1;
4475 }
4476
4477 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4478 request->frame_number = internalFrameNumber;
4479 processCaptureRequest(request, internallyRequestedStreams);
4480
4481 itr = internallyRequestedStreams.begin();
4482 if (itr == internallyRequestedStreams.end()) {
4483 ALOGE("Error Internally Requested Stream list is empty");
4484 assert(0);
4485 } else {
4486 itr->need_metadata = 1;
4487 itr->meteringOnly = 0;
4488 }
4489
4490 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4491 request->frame_number = internalFrameNumber;
4492 processCaptureRequest(request, internallyRequestedStreams);
4493
4494 /* Capture 2X frame*/
4495 modified_meta = modified_settings;
4496 expCompensation = GB_HDR_2X_STEP_EV;
4497 aeLock = 1;
4498 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4499 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4500 modified_settings = modified_meta.release();
4501 request->settings = modified_settings;
4502
4503 itr = internallyRequestedStreams.begin();
4504 if (itr == internallyRequestedStreams.end()) {
4505 ALOGE("Error Internally Requested Stream list is empty");
4506 assert(0);
4507 } else {
4508 itr->need_metadata = 0;
4509 itr->meteringOnly = 1;
4510 }
4511 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4512 request->frame_number = internalFrameNumber;
4513 processCaptureRequest(request, internallyRequestedStreams);
4514
4515 itr = internallyRequestedStreams.begin();
4516 if (itr == internallyRequestedStreams.end()) {
4517 ALOGE("Error Internally Requested Stream list is empty");
4518 assert(0);
4519 } else {
4520 itr->need_metadata = 1;
4521 itr->meteringOnly = 0;
4522 }
4523
4524 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4525 request->frame_number = internalFrameNumber;
4526 processCaptureRequest(request, internallyRequestedStreams);
4527
4528
4529 /* Capture 2X on original streaming config*/
4530 internallyRequestedStreams.clear();
4531
4532 /* Restore original settings pointer */
4533 request->settings = original_settings;
4534 } else {
4535 uint32_t internalFrameNumber;
4536 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4537 request->frame_number = internalFrameNumber;
4538 return processCaptureRequest(request, internallyRequestedStreams);
4539 }
4540
4541 return NO_ERROR;
4542}
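// Reading guide for the HDR snapshot path above (a restatement of the calls as
// written, not additional behavior):
//   1. AE is locked and compensation set to GB_HDR_HALF_STEP_EV; a metering-only
//      internal frame lets AE settle, then the framework-visible buffers are
//      captured and mapped to the original framework frame number via
//      _orchestrationDb.
//   2. Compensation is reset to 0: a metering-only settle frame is followed by
//      an internal blob capture that also queues reprocess metadata
//      (need_metadata = 1).
//   3. The same settle + capture pair is repeated at GB_HDR_2X_STEP_EV.
//   4. The internal stream list is cleared and the original settings pointer is
//      restored before returning.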
4543
4544/*===========================================================================
4545 * FUNCTION : orchestrateResult
4546 *
4547 * DESCRIPTION: Orchestrates a capture result to camera service
4548 *
4549 * PARAMETERS :
4550 * @result : capture result to send back to camera service
4551 *
4552 * RETURN :
4553 *
4554 *==========================================================================*/
4555void QCamera3HardwareInterface::orchestrateResult(
4556 camera3_capture_result_t *result)
4557{
4558 uint32_t frameworkFrameNumber;
4559 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4560 frameworkFrameNumber);
4561 if (rc != NO_ERROR) {
4562 LOGE("Cannot find translated frameworkFrameNumber");
4563 assert(0);
4564 } else {
4565 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004566 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004567 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004568 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004569 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4570 camera_metadata_entry_t entry;
4571 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4572 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004573 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004574 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4575 if (ret != OK)
4576 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004577 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004578 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004579 result->frame_number = frameworkFrameNumber;
4580 mCallbackOps->process_capture_result(mCallbackOps, result);
4581 }
4582 }
4583}
4584
4585/*===========================================================================
4586 * FUNCTION : orchestrateNotify
4587 *
4588 * DESCRIPTION: Orchestrates a notify to camera service
4589 *
4590 * PARAMETERS :
4591 * @notify_msg : notify message to send to camera service
4592 *
4593 * RETURN :
4594 *
4595 *==========================================================================*/
4596void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4597{
4598 uint32_t frameworkFrameNumber;
4599 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004600 int32_t rc = NO_ERROR;
4601
4602 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004603 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004604
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004605 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004606 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4607 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4608 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004609 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004610 LOGE("Cannot find translated frameworkFrameNumber");
4611 assert(0);
4612 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004613 }
4614 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004615
4616 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4617 LOGD("Internal Request drop the notifyCb");
4618 } else {
4619 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4620 mCallbackOps->notify(mCallbackOps, notify_msg);
4621 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004622}
4623
4624/*===========================================================================
4625 * FUNCTION : FrameNumberRegistry
4626 *
4627 * DESCRIPTION: Constructor
4628 *
4629 * PARAMETERS :
4630 *
4631 * RETURN :
4632 *
4633 *==========================================================================*/
4634FrameNumberRegistry::FrameNumberRegistry()
4635{
4636 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4637}
4638
4639/*===========================================================================
4640 * FUNCTION : ~FrameNumberRegistry
4641 *
4642 * DESCRIPTION: Destructor
4643 *
4644 * PARAMETERS :
4645 *
4646 * RETURN :
4647 *
4648 *==========================================================================*/
4649FrameNumberRegistry::~FrameNumberRegistry()
4650{
4651}
4652
4653/*===========================================================================
4654 * FUNCTION : PurgeOldEntriesLocked
4655 *
4656 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4657 *
4658 * PARAMETERS :
4659 *
4660 * RETURN : NONE
4661 *
4662 *==========================================================================*/
4663void FrameNumberRegistry::purgeOldEntriesLocked()
4664{
4665 while (_register.begin() != _register.end()) {
4666 auto itr = _register.begin();
4667 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4668 _register.erase(itr);
4669 } else {
4670 return;
4671 }
4672 }
4673}
4674
4675/*===========================================================================
4676 * FUNCTION : allocStoreInternalFrameNumber
4677 *
4678 * DESCRIPTION: Method to note down a framework request and associate a new
4679 * internal request number against it
4680 *
4681 * PARAMETERS :
4682 * @frameworkFrameNumber: Identifier given by the framework
4683 * @internalFrameNumber : Output parameter that receives the newly generated
4684 * internal entry
4685 *
4686 * RETURN : Error code
4687 *
4688 *==========================================================================*/
4689int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4690 uint32_t &internalFrameNumber)
4691{
4692 Mutex::Autolock lock(mRegistryLock);
4693 internalFrameNumber = _nextFreeInternalNumber++;
4694 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4695 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4696 purgeOldEntriesLocked();
4697 return NO_ERROR;
4698}
4699
4700/*===========================================================================
4701 * FUNCTION : generateStoreInternalFrameNumber
4702 *
4703 * DESCRIPTION: Method to generate a new internal request number independent
4704 * of any association with framework requests
4705 *
4706 * PARAMETERS :
4707 * @internalFrameNumber: Output parameter that receives the newly generated
4708 * internal frame number
4709 *
4710 * RETURN : Error code
4711 *
4712 *==========================================================================*/
4713int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4714{
4715 Mutex::Autolock lock(mRegistryLock);
4716 internalFrameNumber = _nextFreeInternalNumber++;
4717 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4718 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4719 purgeOldEntriesLocked();
4720 return NO_ERROR;
4721}
4722
4723/*===========================================================================
4724 * FUNCTION : getFrameworkFrameNumber
4725 *
4726 * DESCRIPTION: Method to query the framework framenumber given an internal #
4727 *
4728 * PARAMETERS :
4729 * @internalFrame#: Internal reference
4730 * @frameworkframenumber: Output parameter holding framework frame entry
4731 *
4732 * RETURN : Error code
4733 *
4734 *==========================================================================*/
4735int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4736 uint32_t &frameworkFrameNumber)
4737{
4738 Mutex::Autolock lock(mRegistryLock);
4739 auto itr = _register.find(internalFrameNumber);
4740 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004741 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004742 return -ENOENT;
4743 }
4744
4745 frameworkFrameNumber = itr->second;
4746 purgeOldEntriesLocked();
4747 return NO_ERROR;
4748}
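// A minimal usage sketch for FrameNumberRegistry (the numeric values are
// illustrative assumptions only):
//
//     FrameNumberRegistry registry;
//     uint32_t internalFN = 0;
//     registry.allocStoreInternalFrameNumber(/*framework#*/ 10, internalFN);
//     // ... internalFN is what the backend sees for this request ...
//     uint32_t frameworkFN = 0;
//     registry.getFrameworkFrameNumber(internalFN, frameworkFN);  // frameworkFN == 10
//
// Purely internal captures use generateStoreInternalFrameNumber() instead, map
// to EMPTY_FRAMEWORK_FRAME_NUMBER, and are dropped in orchestrateResult() /
// orchestrateNotify() before reaching the framework.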
Thierry Strudel3d639192016-09-09 11:52:26 -07004749
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004750status_t QCamera3HardwareInterface::fillPbStreamConfig(
4751 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4752 QCamera3Channel *channel, uint32_t streamIndex) {
4753 if (config == nullptr) {
4754 LOGE("%s: config is null", __FUNCTION__);
4755 return BAD_VALUE;
4756 }
4757
4758 if (channel == nullptr) {
4759 LOGE("%s: channel is null", __FUNCTION__);
4760 return BAD_VALUE;
4761 }
4762
4763 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4764 if (stream == nullptr) {
4765 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4766 return NAME_NOT_FOUND;
4767 }
4768
4769 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4770 if (streamInfo == nullptr) {
4771 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4772 return NAME_NOT_FOUND;
4773 }
4774
4775 config->id = pbStreamId;
4776 config->image.width = streamInfo->dim.width;
4777 config->image.height = streamInfo->dim.height;
4778 config->image.padding = 0;
4779 config->image.format = pbStreamFormat;
4780
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004781 uint32_t totalPlaneSize = 0;
4782
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004783 // Fill plane information.
4784 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4785 pbcamera::PlaneConfiguration plane;
4786 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4787 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4788 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004789
4790 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004791 }
4792
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004793 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004794 return OK;
4795}
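// Worked example for the padding math above (all byte values are illustrative
// assumptions, not real stream dimensions): for a two-plane stream with
// stride * scanline of 1920 * 1088 and 1920 * 544, and a reported frame_len of
// 3137536, padding = 3137536 - (2088960 + 1044480) = 4096, i.e. whatever the
// backend allocates beyond the sum of the plane sizes.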
4796
Thierry Strudel3d639192016-09-09 11:52:26 -07004797/*===========================================================================
4798 * FUNCTION : processCaptureRequest
4799 *
4800 * DESCRIPTION: process a capture request from camera service
4801 *
4802 * PARAMETERS :
4803 * @request : request from framework to process
4804 *
4805 * RETURN :
4806 *
4807 *==========================================================================*/
4808int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004809 camera3_capture_request_t *request,
4810 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004811{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004812 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004813 int rc = NO_ERROR;
4814 int32_t request_id;
4815 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004816 bool isVidBufRequested = false;
4817 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004818 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004819
4820 pthread_mutex_lock(&mMutex);
4821
4822 // Validate current state
4823 switch (mState) {
4824 case CONFIGURED:
4825 case STARTED:
4826 /* valid state */
4827 break;
4828
4829 case ERROR:
4830 pthread_mutex_unlock(&mMutex);
4831 handleCameraDeviceError();
4832 return -ENODEV;
4833
4834 default:
4835 LOGE("Invalid state %d", mState);
4836 pthread_mutex_unlock(&mMutex);
4837 return -ENODEV;
4838 }
4839
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004840 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004841 if (rc != NO_ERROR) {
4842 LOGE("incoming request is not valid");
4843 pthread_mutex_unlock(&mMutex);
4844 return rc;
4845 }
4846
4847 meta = request->settings;
4848
4849 // For first capture request, send capture intent, and
4850 // stream on all streams
4851 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004852 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004853 // send an unconfigure to the backend so that the isp
4854 // resources are deallocated
4855 if (!mFirstConfiguration) {
4856 cam_stream_size_info_t stream_config_info;
4857 int32_t hal_version = CAM_HAL_V3;
4858 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4859 stream_config_info.buffer_info.min_buffers =
4860 MIN_INFLIGHT_REQUESTS;
4861 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004862 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004863 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004864 clear_metadata_buffer(mParameters);
4865 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4866 CAM_INTF_PARM_HAL_VERSION, hal_version);
4867 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4868 CAM_INTF_META_STREAM_INFO, stream_config_info);
4869 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4870 mParameters);
4871 if (rc < 0) {
4872 LOGE("set_parms for unconfigure failed");
4873 pthread_mutex_unlock(&mMutex);
4874 return rc;
4875 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004876
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004878 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004882 property_get("persist.camera.is_type", is_type_value, "4");
4883 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4884 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4885 property_get("persist.camera.is_type_preview", is_type_value, "4");
4886 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4887 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004888
4889 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4890 int32_t hal_version = CAM_HAL_V3;
4891 uint8_t captureIntent =
4892 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4893 mCaptureIntent = captureIntent;
4894 clear_metadata_buffer(mParameters);
4895 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4896 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4897 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004898 if (mFirstConfiguration) {
4899 // configure instant AEC
4900 // Instant AEC is a session based parameter and it is needed only
4901 // once per complete session after open camera.
4902 // i.e. This is set only once for the first capture request, after open camera.
4903 setInstantAEC(meta);
4904 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004905 uint8_t fwkVideoStabMode=0;
4906 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4907 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4908 }
4909
Xue Tuecac74e2017-04-17 13:58:15 -07004910 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4911 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004912 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 int32_t vsMode;
4914 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4916 rc = BAD_VALUE;
4917 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004918 LOGD("setEis %d", setEis);
4919 bool eis3Supported = false;
4920 size_t count = IS_TYPE_MAX;
4921 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4922 for (size_t i = 0; i < count; i++) {
4923 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4924 eis3Supported = true;
4925 break;
4926 }
4927 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004928
4929 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004930 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4932 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004933 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4934 is_type = isTypePreview;
4935 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4936 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4937 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004938 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004939 } else {
4940 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004942 } else {
4943 is_type = IS_TYPE_NONE;
4944 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004945 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004946 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004947 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4948 }
4949 }
4950
4951 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4952 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4953
Thierry Strudel54dc9782017-02-15 12:12:10 -08004954 //Disable tintless only if the property is set to 0
4955 memset(prop, 0, sizeof(prop));
4956 property_get("persist.camera.tintless.enable", prop, "1");
4957 int32_t tintless_value = atoi(prop);
4958
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4960 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004961
Thierry Strudel3d639192016-09-09 11:52:26 -07004962 //Disable CDS for HFR mode or if DIS/EIS is on.
4963 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4964 //after every configure_stream
4965 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4966 (m_bIsVideo)) {
4967 int32_t cds = CAM_CDS_MODE_OFF;
4968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4969 CAM_INTF_PARM_CDS_MODE, cds))
4970 LOGE("Failed to disable CDS for HFR mode");
4971
4972 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004973
4974 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4975 uint8_t* use_av_timer = NULL;
4976
4977 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004978 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004979 use_av_timer = &m_debug_avtimer;
4980 }
4981 else{
4982 use_av_timer =
4983 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004984 if (use_av_timer) {
4985 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4986 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004987 }
4988
4989 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4990 rc = BAD_VALUE;
4991 }
4992 }
4993
Thierry Strudel3d639192016-09-09 11:52:26 -07004994 setMobicat();
4995
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004996 uint8_t nrMode = 0;
4997 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4998 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4999 }
5000
Thierry Strudel3d639192016-09-09 11:52:26 -07005001 /* Set fps and hfr mode while sending meta stream info so that sensor
5002 * can configure appropriate streaming mode */
5003 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005004 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5005 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005006 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5007 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005008 if (rc == NO_ERROR) {
5009 int32_t max_fps =
5010 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005011 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005012 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5013 }
5014 /* For HFR, more buffers are dequeued upfront to improve the performance */
5015 if (mBatchSize) {
5016 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5017 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5018 }
5019 }
5020 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005021 LOGE("setHalFpsRange failed");
5022 }
5023 }
5024 if (meta.exists(ANDROID_CONTROL_MODE)) {
5025 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5026 rc = extractSceneMode(meta, metaMode, mParameters);
5027 if (rc != NO_ERROR) {
5028 LOGE("extractSceneMode failed");
5029 }
5030 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005031 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005032
Thierry Strudel04e026f2016-10-10 11:27:36 -07005033 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5034 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5035 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5036 rc = setVideoHdrMode(mParameters, vhdr);
5037 if (rc != NO_ERROR) {
5038 LOGE("setVideoHDR is failed");
5039 }
5040 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005041
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005042 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005043 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005044 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005045 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5047 sensorModeFullFov)) {
5048 rc = BAD_VALUE;
5049 }
5050 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005051 //TODO: validate the arguments, HSV scenemode should have only the
5052 //advertised fps ranges
5053
5054 /*set the capture intent, hal version, tintless, stream info,
5055 *and DIS enable parameters to the backend*/
5056 LOGD("set_parms META_STREAM_INFO " );
5057 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005058 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5059 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005060 mStreamConfigInfo.type[i],
5061 mStreamConfigInfo.stream_sizes[i].width,
5062 mStreamConfigInfo.stream_sizes[i].height,
5063 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005064 mStreamConfigInfo.format[i],
5065 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005067
Thierry Strudel3d639192016-09-09 11:52:26 -07005068 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5069 mParameters);
5070 if (rc < 0) {
5071 LOGE("set_parms failed for hal version, stream info");
5072 }
5073
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005074 cam_sensor_mode_info_t sensorModeInfo = {};
5075 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 if (rc != NO_ERROR) {
5077 LOGE("Failed to get sensor output size");
5078 pthread_mutex_unlock(&mMutex);
5079 goto error_exit;
5080 }
5081
5082 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5083 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005084 sensorModeInfo.active_array_size.width,
5085 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005086
5087 /* Set batchmode before initializing channel. Since registerBuffer
5088 * internally initializes some of the channels, better set batchmode
5089 * even before first register buffer */
5090 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5091 it != mStreamInfo.end(); it++) {
5092 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5093 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5094 && mBatchSize) {
5095 rc = channel->setBatchSize(mBatchSize);
5096 //Disable per frame map unmap for HFR/batchmode case
5097 rc |= channel->setPerFrameMapUnmap(false);
5098 if (NO_ERROR != rc) {
5099 LOGE("Channel init failed %d", rc);
5100 pthread_mutex_unlock(&mMutex);
5101 goto error_exit;
5102 }
5103 }
5104 }
5105
5106 //First initialize all streams
5107 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5108 it != mStreamInfo.end(); it++) {
5109 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005110
5111 /* Initial value of NR mode is needed before stream on */
5112 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005113 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5114 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005115 setEis) {
5116 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5117 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5118 is_type = mStreamConfigInfo.is_type[i];
5119 break;
5120 }
5121 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005122 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005123 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005124 rc = channel->initialize(IS_TYPE_NONE);
5125 }
5126 if (NO_ERROR != rc) {
5127 LOGE("Channel initialization failed %d", rc);
5128 pthread_mutex_unlock(&mMutex);
5129 goto error_exit;
5130 }
5131 }
5132
5133 if (mRawDumpChannel) {
5134 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5135 if (rc != NO_ERROR) {
5136 LOGE("Error: Raw Dump Channel init failed");
5137 pthread_mutex_unlock(&mMutex);
5138 goto error_exit;
5139 }
5140 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005141 if (mHdrPlusRawSrcChannel) {
5142 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5143 if (rc != NO_ERROR) {
5144 LOGE("Error: HDR+ RAW Source Channel init failed");
5145 pthread_mutex_unlock(&mMutex);
5146 goto error_exit;
5147 }
5148 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005149 if (mSupportChannel) {
5150 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5151 if (rc < 0) {
5152 LOGE("Support channel initialization failed");
5153 pthread_mutex_unlock(&mMutex);
5154 goto error_exit;
5155 }
5156 }
5157 if (mAnalysisChannel) {
5158 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5159 if (rc < 0) {
5160 LOGE("Analysis channel initialization failed");
5161 pthread_mutex_unlock(&mMutex);
5162 goto error_exit;
5163 }
5164 }
5165 if (mDummyBatchChannel) {
5166 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5167 if (rc < 0) {
5168 LOGE("mDummyBatchChannel setBatchSize failed");
5169 pthread_mutex_unlock(&mMutex);
5170 goto error_exit;
5171 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005172 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005173 if (rc < 0) {
5174 LOGE("mDummyBatchChannel initialization failed");
5175 pthread_mutex_unlock(&mMutex);
5176 goto error_exit;
5177 }
5178 }
5179
5180 // Set bundle info
5181 rc = setBundleInfo();
5182 if (rc < 0) {
5183 LOGE("setBundleInfo failed %d", rc);
5184 pthread_mutex_unlock(&mMutex);
5185 goto error_exit;
5186 }
5187
5188 //update settings from app here
5189 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5190 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5191 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5192 }
5193 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5194 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5195 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5196 }
5197 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5198 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5199 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5200
5201 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5202 (mLinkedCameraId != mCameraId) ) {
5203 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5204 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005205 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005206 goto error_exit;
5207 }
5208 }
5209
5210 // add bundle related cameras
5211 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5212 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005213 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5214 &m_pDualCamCmdPtr->bundle_info;
5215 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005216 if (mIsDeviceLinked)
5217 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5218 else
5219 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5220
5221 pthread_mutex_lock(&gCamLock);
5222
5223 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5224 LOGE("Dualcam: Invalid Session Id ");
5225 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005226 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 goto error_exit;
5228 }
5229
5230 if (mIsMainCamera == 1) {
5231 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5232 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005233 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005234 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005235 // related session id should be session id of linked session
5236 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5237 } else {
5238 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5239 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005240 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005241 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5243 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005244 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 pthread_mutex_unlock(&gCamLock);
5246
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005247 rc = mCameraHandle->ops->set_dual_cam_cmd(
5248 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005249 if (rc < 0) {
5250 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005251 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005252 goto error_exit;
5253 }
5254 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 goto no_error;
5256error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005257 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 return rc;
5259no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005260 mWokenUpByDaemon = false;
5261 mPendingLiveRequest = 0;
5262 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 }
5264
5265 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005266 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005267
5268 if (mFlushPerf) {
5269 //we cannot accept any requests during flush
5270 LOGE("process_capture_request cannot proceed during flush");
5271 pthread_mutex_unlock(&mMutex);
5272 return NO_ERROR; //should return an error
5273 }
5274
5275 if (meta.exists(ANDROID_REQUEST_ID)) {
5276 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5277 mCurrentRequestId = request_id;
5278 LOGD("Received request with id: %d", request_id);
5279 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5280 LOGE("Unable to find request id field, \
5281 & no previous id available");
5282 pthread_mutex_unlock(&mMutex);
5283 return NAME_NOT_FOUND;
5284 } else {
5285 LOGD("Re-using old request id");
5286 request_id = mCurrentRequestId;
5287 }
5288
5289 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5290 request->num_output_buffers,
5291 request->input_buffer,
5292 frameNumber);
5293 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005294 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005296 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005297 uint32_t snapshotStreamId = 0;
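// Walk the requested output buffers: wait on each acquire fence, flag JPEG
// (blob) and depth requests, and record the owning stream IDs in streamsArray
// so the backend knows which streams this request targets.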
5298 for (size_t i = 0; i < request->num_output_buffers; i++) {
5299 const camera3_stream_buffer_t& output = request->output_buffers[i];
5300 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5301
Emilian Peev7650c122017-01-19 08:24:33 -08005302 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5303 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005304 //FIXME: Call a function to store a local copy of the JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 blob_request = 1;
5306 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5307 }
5308
5309 if (output.acquire_fence != -1) {
5310 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5311 close(output.acquire_fence);
5312 if (rc != OK) {
5313 LOGE("sync wait failed %d", rc);
5314 pthread_mutex_unlock(&mMutex);
5315 return rc;
5316 }
5317 }
5318
Emilian Peev0f3c3162017-03-15 12:57:46 +00005319 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5320 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005321 depthRequestPresent = true;
5322 continue;
5323 }
5324
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005325 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005326 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005327
5328 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5329 isVidBufRequested = true;
5330 }
5331 }
5332
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005333 //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5334 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5335 itr++) {
5336 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5337 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5338 channel->getStreamID(channel->getStreamTypeMask());
5339
5340 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5341 isVidBufRequested = true;
5342 }
5343 }
5344
Thierry Strudel3d639192016-09-09 11:52:26 -07005345 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005346 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005347 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005348 }
5349 if (blob_request && mRawDumpChannel) {
5350 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005351 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005352 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005353 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005354 }
5355
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005356 {
5357 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5358 // Request a RAW buffer if
5359 // 1. mHdrPlusRawSrcChannel is valid.
5360 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5361 // 3. There is no pending HDR+ request.
5362 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5363 mHdrPlusPendingRequests.size() == 0) {
5364 streamsArray.stream_request[streamsArray.num_streams].streamID =
5365 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5366 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5367 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005368 }
5369
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005370 //extract capture intent
5371 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5372 mCaptureIntent =
5373 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5374 }
5375
5376 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5377 mCacMode =
5378 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5379 }
5380
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005381 uint8_t requestedLensShadingMapMode;
5382 // Get the shading map mode.
5383 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5384 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5385 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5386 } else {
5387 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5388 }
5389
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005390 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005391 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005392
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005393 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005394 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005395 // If this request has a still capture intent, try to submit an HDR+ request.
5396 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5397 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5398 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5399 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005400 }
5401
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005402 if (hdrPlusRequest) {
5403 // For a HDR+ request, just set the frame parameters.
5404 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5405 if (rc < 0) {
5406 LOGE("fail to set frame parameters");
5407 pthread_mutex_unlock(&mMutex);
5408 return rc;
5409 }
5410 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005411 /* Parse the settings:
5412 * - For every request in NORMAL MODE
5413 * - For every request in HFR mode during preview only case
5414 * - For first request of every batch in HFR mode during video
5415 * recording. In batchmode the same settings except frame number is
5416 * repeated in each request of the batch.
5417 */
5418 if (!mBatchSize ||
5419 (mBatchSize && !isVidBufRequested) ||
5420 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005421 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005422 if (rc < 0) {
5423 LOGE("fail to set frame parameters");
5424 pthread_mutex_unlock(&mMutex);
5425 return rc;
5426 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005427
5428 {
5429 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5430 // will be reported in result metadata.
5431 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5432 if (mHdrPlusModeEnabled) {
5433 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5434 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5435 }
5436 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005437 }
5438 /* For batch-mode HFR, setFrameParameters is not called for every
5439 * request; only the frame number of the latest request is parsed.
5440 * Keep track of the first and last frame numbers in a batch so that
5441 * metadata for all frame numbers of the batch can be duplicated in
5442 * handleBatchMetadata */
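// Illustrative example: with mBatchSize = 4 and a batch starting at frame N,
// requests N..N+3 share one set of parameters; only the latest frame number
// reaches the backend, and handleBatchMetadata later duplicates the returned
// metadata for frames N..N+3.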
5443 if (mBatchSize) {
5444 if (!mToBeQueuedVidBufs) {
5445 //start of the batch
5446 mFirstFrameNumberInBatch = request->frame_number;
5447 }
5448 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5449 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5450 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005451 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005452 return BAD_VALUE;
5453 }
5454 }
5455 if (mNeedSensorRestart) {
5456 /* Unlock the mutex as restartSensor waits on the channels to be
5457 * stopped, which in turn calls stream callback functions -
5458 * handleBufferWithLock and handleMetadataWithLock */
5459 pthread_mutex_unlock(&mMutex);
5460 rc = dynamicUpdateMetaStreamInfo();
5461 if (rc != NO_ERROR) {
5462 LOGE("Restarting the sensor failed");
5463 return BAD_VALUE;
5464 }
5465 mNeedSensorRestart = false;
5466 pthread_mutex_lock(&mMutex);
5467 }
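// An instant-AEC reset was requested earlier; ask the backend to return to
// normal AEC convergence starting with this request.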
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005468 if(mResetInstantAEC) {
5469 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5470 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5471 mResetInstantAEC = false;
5472 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005473 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005474 if (request->input_buffer->acquire_fence != -1) {
5475 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5476 close(request->input_buffer->acquire_fence);
5477 if (rc != OK) {
5478 LOGE("input buffer sync wait failed %d", rc);
5479 pthread_mutex_unlock(&mMutex);
5480 return rc;
5481 }
5482 }
5483 }
5484
5485 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5486 mLastCustIntentFrmNum = frameNumber;
5487 }
5488 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005489 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005490 pendingRequestIterator latestRequest;
5491 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005492 pendingRequest.num_buffers = depthRequestPresent ?
5493 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005494 pendingRequest.request_id = request_id;
5495 pendingRequest.blob_request = blob_request;
5496 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005497 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005498 if (request->input_buffer) {
5499 pendingRequest.input_buffer =
5500 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5501 *(pendingRequest.input_buffer) = *(request->input_buffer);
5502 pInputBuffer = pendingRequest.input_buffer;
5503 } else {
5504 pendingRequest.input_buffer = NULL;
5505 pInputBuffer = NULL;
5506 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005507 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005508
5509 pendingRequest.pipeline_depth = 0;
5510 pendingRequest.partial_result_cnt = 0;
5511 extractJpegMetadata(mCurJpegMeta, request);
5512 pendingRequest.jpegMetadata = mCurJpegMeta;
5513 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005514 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005515 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5516 mHybridAeEnable =
5517 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5518 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005519
5520 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5521 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005522 /* DevCamDebug metadata processCaptureRequest */
5523 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5524 mDevCamDebugMetaEnable =
5525 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5526 }
5527 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5528 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005529
5530 //extract CAC info
5531 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5532 mCacMode =
5533 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5534 }
5535 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005536 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005537 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5538 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005539
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005540 // extract enableZsl info
5541 if (gExposeEnableZslKey) {
5542 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5543 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5544 mZslEnabled = pendingRequest.enableZsl;
5545 } else {
5546 pendingRequest.enableZsl = mZslEnabled;
5547 }
5548 }
5549
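// Track the buffers of this request so they can be matched against results
// later and returned (or flagged as errors) on flush.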
Thierry Strudel3d639192016-09-09 11:52:26 -07005550 PendingBuffersInRequest bufsForCurRequest;
5551 bufsForCurRequest.frame_number = frameNumber;
5552 // Mark current timestamp for the new request
5553 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005554 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005555
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005556 if (hdrPlusRequest) {
5557 // Save settings for this request.
5558 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5559 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5560
5561 // Add to pending HDR+ request queue.
5562 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5563 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5564
5565 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5566 }
5567
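// Register every non-depth output buffer with the pending request and the
// pending buffer map; depth blobs are mapped to the depth channel further
// below instead.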
Thierry Strudel3d639192016-09-09 11:52:26 -07005568 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005569 if ((request->output_buffers[i].stream->data_space ==
5570 HAL_DATASPACE_DEPTH) &&
5571 (HAL_PIXEL_FORMAT_BLOB ==
5572 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005573 continue;
5574 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005575 RequestedBufferInfo requestedBuf;
5576 memset(&requestedBuf, 0, sizeof(requestedBuf));
5577 requestedBuf.stream = request->output_buffers[i].stream;
5578 requestedBuf.buffer = NULL;
5579 pendingRequest.buffers.push_back(requestedBuf);
5580
5581 // Add to buffer handle the pending buffers list
5582 PendingBufferInfo bufferInfo;
5583 bufferInfo.buffer = request->output_buffers[i].buffer;
5584 bufferInfo.stream = request->output_buffers[i].stream;
5585 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5586 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5587 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5588 frameNumber, bufferInfo.buffer,
5589 channel->getStreamTypeMask(), bufferInfo.stream->format);
5590 }
5591 // Add this request packet into mPendingBuffersMap
5592 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5593 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5594 mPendingBuffersMap.get_num_overall_buffers());
5595
5596 latestRequest = mPendingRequestsList.insert(
5597 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005598
5599 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5600 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005601 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005602 for (size_t i = 0; i < request->num_output_buffers; i++) {
5603 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5604 }
5605
Thierry Strudel3d639192016-09-09 11:52:26 -07005606 if(mFlush) {
5607 LOGI("mFlush is true");
5608 pthread_mutex_unlock(&mMutex);
5609 return NO_ERROR;
5610 }
5611
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005612 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5613 // channel.
5614 if (!hdrPlusRequest) {
5615 int indexUsed;
5616 // Notify metadata channel we receive a request
5617 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005618
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005619 if(request->input_buffer != NULL){
5620 LOGD("Input request, frame_number %d", frameNumber);
5621 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5622 if (NO_ERROR != rc) {
5623 LOGE("fail to set reproc parameters");
5624 pthread_mutex_unlock(&mMutex);
5625 return rc;
5626 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005627 }
5628
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005629 // Call request on other streams
5630 uint32_t streams_need_metadata = 0;
5631 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5632 for (size_t i = 0; i < request->num_output_buffers; i++) {
5633 const camera3_stream_buffer_t& output = request->output_buffers[i];
5634 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5635
5636 if (channel == NULL) {
5637 LOGW("invalid channel pointer for stream");
5638 continue;
5639 }
5640
5641 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5642 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5643 output.buffer, request->input_buffer, frameNumber);
5644 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005645 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005646 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5647 if (rc < 0) {
5648 LOGE("Fail to request on picture channel");
5649 pthread_mutex_unlock(&mMutex);
5650 return rc;
5651 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005652 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005653 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5654 assert(NULL != mDepthChannel);
5655 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005656
Emilian Peev7650c122017-01-19 08:24:33 -08005657 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5658 if (rc < 0) {
5659 LOGE("Fail to map on depth buffer");
5660 pthread_mutex_unlock(&mMutex);
5661 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005662 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005663 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005664 } else {
5665 LOGD("snapshot request with buffer %p, frame_number %d",
5666 output.buffer, frameNumber);
5667 if (!request->settings) {
5668 rc = channel->request(output.buffer, frameNumber,
5669 NULL, mPrevParameters, indexUsed);
5670 } else {
5671 rc = channel->request(output.buffer, frameNumber,
5672 NULL, mParameters, indexUsed);
5673 }
5674 if (rc < 0) {
5675 LOGE("Fail to request on picture channel");
5676 pthread_mutex_unlock(&mMutex);
5677 return rc;
5678 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005679
Emilian Peev7650c122017-01-19 08:24:33 -08005680 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5681 uint32_t j = 0;
5682 for (j = 0; j < streamsArray.num_streams; j++) {
5683 if (streamsArray.stream_request[j].streamID == streamId) {
5684 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5685 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5686 else
5687 streamsArray.stream_request[j].buf_index = indexUsed;
5688 break;
5689 }
5690 }
5691 if (j == streamsArray.num_streams) {
5692 LOGE("Did not find matching stream to update index");
5693 assert(0);
5694 }
5695
5696 pendingBufferIter->need_metadata = true;
5697 streams_need_metadata++;
5698 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005699 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005700 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5701 bool needMetadata = false;
5702 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5703 rc = yuvChannel->request(output.buffer, frameNumber,
5704 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5705 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005706 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005707 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005708 pthread_mutex_unlock(&mMutex);
5709 return rc;
5710 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005711
5712 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5713 uint32_t j = 0;
5714 for (j = 0; j < streamsArray.num_streams; j++) {
5715 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005716 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5717 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5718 else
5719 streamsArray.stream_request[j].buf_index = indexUsed;
5720 break;
5721 }
5722 }
5723 if (j == streamsArray.num_streams) {
5724 LOGE("Did not find matching stream to update index");
5725 assert(0);
5726 }
5727
5728 pendingBufferIter->need_metadata = needMetadata;
5729 if (needMetadata)
5730 streams_need_metadata += 1;
5731 LOGD("calling YUV channel request, need_metadata is %d",
5732 needMetadata);
5733 } else {
5734 LOGD("request with buffer %p, frame_number %d",
5735 output.buffer, frameNumber);
5736
5737 rc = channel->request(output.buffer, frameNumber, indexUsed);
5738
5739 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5740 uint32_t j = 0;
5741 for (j = 0; j < streamsArray.num_streams; j++) {
5742 if (streamsArray.stream_request[j].streamID == streamId) {
5743 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5744 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5745 else
5746 streamsArray.stream_request[j].buf_index = indexUsed;
5747 break;
5748 }
5749 }
5750 if (j == streamsArray.num_streams) {
5751 LOGE("Did not find matching stream to update index");
5752 assert(0);
5753 }
5754
5755 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5756 && mBatchSize) {
5757 mToBeQueuedVidBufs++;
5758 if (mToBeQueuedVidBufs == mBatchSize) {
5759 channel->queueBatchBuf();
5760 }
5761 }
5762 if (rc < 0) {
5763 LOGE("request failed");
5764 pthread_mutex_unlock(&mMutex);
5765 return rc;
5766 }
5767 }
5768 pendingBufferIter++;
5769 }
5770
5771 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5772 itr++) {
5773 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5774
5775 if (channel == NULL) {
5776 LOGE("invalid channel pointer for stream");
5777 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005778 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005779 return BAD_VALUE;
5780 }
5781
5782 InternalRequest requestedStream;
5783 requestedStream = (*itr);
5784
5785
5786 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5787 LOGD("snapshot request internally input buffer %p, frame_number %d",
5788 request->input_buffer, frameNumber);
5789 if(request->input_buffer != NULL){
5790 rc = channel->request(NULL, frameNumber,
5791 pInputBuffer, &mReprocMeta, indexUsed, true,
5792 requestedStream.meteringOnly);
5793 if (rc < 0) {
5794 LOGE("Fail to request on picture channel");
5795 pthread_mutex_unlock(&mMutex);
5796 return rc;
5797 }
5798 } else {
5799 LOGD("snapshot request with frame_number %d", frameNumber);
5800 if (!request->settings) {
5801 rc = channel->request(NULL, frameNumber,
5802 NULL, mPrevParameters, indexUsed, true,
5803 requestedStream.meteringOnly);
5804 } else {
5805 rc = channel->request(NULL, frameNumber,
5806 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5807 }
5808 if (rc < 0) {
5809 LOGE("Fail to request on picture channel");
5810 pthread_mutex_unlock(&mMutex);
5811 return rc;
5812 }
5813
5814 if ((*itr).meteringOnly != 1) {
5815 requestedStream.need_metadata = 1;
5816 streams_need_metadata++;
5817 }
5818 }
5819
5820 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5821 uint32_t j = 0;
5822 for (j = 0; j < streamsArray.num_streams; j++) {
5823 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005824 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5825 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5826 else
5827 streamsArray.stream_request[j].buf_index = indexUsed;
5828 break;
5829 }
5830 }
5831 if (j == streamsArray.num_streams) {
5832 LOGE("Did not find matching stream to update index");
5833 assert(0);
5834 }
5835
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005836 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005837 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005838 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005839 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005840 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005841 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005842 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005843 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005844
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005845 //If 2 streams have need_metadata set to true, fail the request, unless
5846 //we copy/reference count the metadata buffer
5847 if (streams_need_metadata > 1) {
5848 LOGE("not supporting a request in which two streams require"
5849 " two HAL metadata buffers for reprocessing");
5850 pthread_mutex_unlock(&mMutex);
5851 return -EINVAL;
5852 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005853
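// Decide whether the sensor should stream PD (phase-detect) data for this
// request: enabled only when a depth request is present on a configured depth
// channel, with the per-request setting cached in mDepthCloudMode.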
Emilian Peev656e4fa2017-06-02 16:47:04 +01005854 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5855 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5856 if (depthRequestPresent && mDepthChannel) {
5857 if (request->settings) {
5858 camera_metadata_ro_entry entry;
5859 if (find_camera_metadata_ro_entry(request->settings,
5860 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5861 if (entry.data.u8[0]) {
5862 pdafEnable = CAM_PD_DATA_ENABLED;
5863 } else {
5864 pdafEnable = CAM_PD_DATA_SKIP;
5865 }
5866 mDepthCloudMode = pdafEnable;
5867 } else {
5868 pdafEnable = mDepthCloudMode;
5869 }
5870 } else {
5871 pdafEnable = mDepthCloudMode;
5872 }
5873 }
5874
Emilian Peev7650c122017-01-19 08:24:33 -08005875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5876 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5877 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5878 pthread_mutex_unlock(&mMutex);
5879 return BAD_VALUE;
5880 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005881
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005882 if (request->input_buffer == NULL) {
5883 /* Set the parameters to backend:
5884 * - For every request in NORMAL MODE
5885 * - For every request in HFR mode during preview only case
5886 * - Once every batch in HFR mode during video recording
5887 */
5888 if (!mBatchSize ||
5889 (mBatchSize && !isVidBufRequested) ||
5890 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5891 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5892 mBatchSize, isVidBufRequested,
5893 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005894
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005895 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5896 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5897 uint32_t m = 0;
5898 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5899 if (streamsArray.stream_request[k].streamID ==
5900 mBatchedStreamsArray.stream_request[m].streamID)
5901 break;
5902 }
5903 if (m == mBatchedStreamsArray.num_streams) {
5904 mBatchedStreamsArray.stream_request\
5905 [mBatchedStreamsArray.num_streams].streamID =
5906 streamsArray.stream_request[k].streamID;
5907 mBatchedStreamsArray.stream_request\
5908 [mBatchedStreamsArray.num_streams].buf_index =
5909 streamsArray.stream_request[k].buf_index;
5910 mBatchedStreamsArray.num_streams =
5911 mBatchedStreamsArray.num_streams + 1;
5912 }
5913 }
5914 streamsArray = mBatchedStreamsArray;
5915 }
5916 /* Update stream id of all the requested buffers */
5917 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5918 streamsArray)) {
5919 LOGE("Failed to set stream id list in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005920 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005921 return BAD_VALUE;
5922 }
5923
5924 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5925 mParameters);
5926 if (rc < 0) {
5927 LOGE("set_parms failed");
5928 }
5929 /* reset to zero because the batch has been queued */
5930 mToBeQueuedVidBufs = 0;
5931 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5932 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5933 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005934 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5935 uint32_t m = 0;
5936 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5937 if (streamsArray.stream_request[k].streamID ==
5938 mBatchedStreamsArray.stream_request[m].streamID)
5939 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005940 }
5941 if (m == mBatchedStreamsArray.num_streams) {
5942 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5943 streamID = streamsArray.stream_request[k].streamID;
5944 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5945 buf_index = streamsArray.stream_request[k].buf_index;
5946 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5947 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005948 }
5949 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005950 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005951
5952 // Start all streams after the first setting is sent, so that the
5953 // setting can be applied sooner: (0 + apply_delay)th frame.
5954 if (mState == CONFIGURED && mChannelHandle) {
5955 //Then start them.
5956 LOGH("Start META Channel");
5957 rc = mMetadataChannel->start();
5958 if (rc < 0) {
5959 LOGE("META channel start failed");
5960 pthread_mutex_unlock(&mMutex);
5961 return rc;
5962 }
5963
5964 if (mAnalysisChannel) {
5965 rc = mAnalysisChannel->start();
5966 if (rc < 0) {
5967 LOGE("Analysis channel start failed");
5968 mMetadataChannel->stop();
5969 pthread_mutex_unlock(&mMutex);
5970 return rc;
5971 }
5972 }
5973
5974 if (mSupportChannel) {
5975 rc = mSupportChannel->start();
5976 if (rc < 0) {
5977 LOGE("Support channel start failed");
5978 mMetadataChannel->stop();
5979 /* Although support and analysis are mutually exclusive today,
5980 handle it here in any case for future proofing */
5981 if (mAnalysisChannel) {
5982 mAnalysisChannel->stop();
5983 }
5984 pthread_mutex_unlock(&mMutex);
5985 return rc;
5986 }
5987 }
5988 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5989 it != mStreamInfo.end(); it++) {
5990 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5991 LOGH("Start Processing Channel mask=%d",
5992 channel->getStreamTypeMask());
5993 rc = channel->start();
5994 if (rc < 0) {
5995 LOGE("channel start failed");
5996 pthread_mutex_unlock(&mMutex);
5997 return rc;
5998 }
5999 }
6000
6001 if (mRawDumpChannel) {
6002 LOGD("Starting raw dump stream");
6003 rc = mRawDumpChannel->start();
6004 if (rc != NO_ERROR) {
6005 LOGE("Error Starting Raw Dump Channel");
6006 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6007 it != mStreamInfo.end(); it++) {
6008 QCamera3Channel *channel =
6009 (QCamera3Channel *)(*it)->stream->priv;
6010 LOGH("Stopping Processing Channel mask=%d",
6011 channel->getStreamTypeMask());
6012 channel->stop();
6013 }
6014 if (mSupportChannel)
6015 mSupportChannel->stop();
6016 if (mAnalysisChannel) {
6017 mAnalysisChannel->stop();
6018 }
6019 mMetadataChannel->stop();
6020 pthread_mutex_unlock(&mMutex);
6021 return rc;
6022 }
6023 }
6024
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006025 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006026 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006027 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006028 if (rc != NO_ERROR) {
6029 LOGE("start_channel failed %d", rc);
6030 pthread_mutex_unlock(&mMutex);
6031 return rc;
6032 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006033
6034 {
6035 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006036 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006037
6038 // Now that sensor mode should have been selected, get the selected sensor mode
6039 // info.
6040 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6041 getCurrentSensorModeInfo(mSensorModeInfo);
6042
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006043 if (EaselManagerClientOpened) {
6044 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006045 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6046 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006047 if (rc != OK) {
6048 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6049 mCameraId, mSensorModeInfo.op_pixel_clk);
6050 pthread_mutex_unlock(&mMutex);
6051 return rc;
6052 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006053 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006054 }
6055 }
6056
6057 // Start sensor streaming.
6058 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6059 mChannelHandle);
6060 if (rc != NO_ERROR) {
6061 LOGE("start_sensor_streaming failed %d", rc);
6062 pthread_mutex_unlock(&mMutex);
6063 return rc;
6064 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006065 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006066 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006067 }
6068
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006069 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006070 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006071 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006072 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006073 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6074 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6075 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6076 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006077
6078 if (isSessionHdrPlusModeCompatible()) {
6079 rc = enableHdrPlusModeLocked();
6080 if (rc != OK) {
6081 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6082 pthread_mutex_unlock(&mMutex);
6083 return rc;
6084 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006085 }
6086
6087 mFirstPreviewIntentSeen = true;
6088 }
6089 }
6090
Thierry Strudel3d639192016-09-09 11:52:26 -07006091 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6092
6093 mState = STARTED;
6094 // Added a timed condition wait
6095 struct timespec ts;
6096 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006097 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006098 if (rc < 0) {
6099 isValidTimeout = 0;
6100 LOGE("Error reading the monotonic clock!");
6101 }
6102 else {
6103 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006104 int64_t timeout = 5;
6105 {
6106 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6107 // If there is a pending HDR+ request, the following requests may be blocked until the
6108 // HDR+ request is done. So allow a longer timeout.
6109 if (mHdrPlusPendingRequests.size() > 0) {
6110 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6111 }
6112 }
6113 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006114 }
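// Throttle the framework: wait here until the number of in-flight requests
// drops below the minimum, or until the timeout expires, which is treated as a
// device error.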
6115 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006116 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006117 (mState != ERROR) && (mState != DEINIT)) {
6118 if (!isValidTimeout) {
6119 LOGD("Blocking on conditional wait");
6120 pthread_cond_wait(&mRequestCond, &mMutex);
6121 }
6122 else {
6123 LOGD("Blocking on timed conditional wait");
6124 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6125 if (rc == ETIMEDOUT) {
6126 rc = -ENODEV;
6127 LOGE("Unblocked on timeout!!!!");
6128 break;
6129 }
6130 }
6131 LOGD("Unblocked");
6132 if (mWokenUpByDaemon) {
6133 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006134 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006135 break;
6136 }
6137 }
6138 pthread_mutex_unlock(&mMutex);
6139
6140 return rc;
6141}
6142
6143/*===========================================================================
6144 * FUNCTION : dump
6145 *
6146 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers and the
6147 * pending frame drop list) to the given file descriptor
6148 *
6149 * PARAMETERS :
6150 * @fd : file descriptor to write the dump to
6151 * RETURN : None
6152 *==========================================================================*/
6153void QCamera3HardwareInterface::dump(int fd)
6154{
6155 pthread_mutex_lock(&mMutex);
6156 dprintf(fd, "\n Camera HAL3 information Begin \n");
6157
6158 dprintf(fd, "\nNumber of pending requests: %zu \n",
6159 mPendingRequestsList.size());
6160 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6161 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6162 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6163 for(pendingRequestIterator i = mPendingRequestsList.begin();
6164 i != mPendingRequestsList.end(); i++) {
6165 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6166 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6167 i->input_buffer);
6168 }
6169 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6170 mPendingBuffersMap.get_num_overall_buffers());
6171 dprintf(fd, "-------+------------------\n");
6172 dprintf(fd, " Frame | Stream type mask \n");
6173 dprintf(fd, "-------+------------------\n");
6174 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6175 for(auto &j : req.mPendingBufferList) {
6176 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6177 dprintf(fd, " %5d | %11d \n",
6178 req.frame_number, channel->getStreamTypeMask());
6179 }
6180 }
6181 dprintf(fd, "-------+------------------\n");
6182
6183 dprintf(fd, "\nPending frame drop list: %zu\n",
6184 mPendingFrameDropList.size());
6185 dprintf(fd, "-------+-----------\n");
6186 dprintf(fd, " Frame | Stream ID \n");
6187 dprintf(fd, "-------+-----------\n");
6188 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6189 i != mPendingFrameDropList.end(); i++) {
6190 dprintf(fd, " %5d | %9d \n",
6191 i->frame_number, i->stream_ID);
6192 }
6193 dprintf(fd, "-------+-----------\n");
6194
6195 dprintf(fd, "\n Camera HAL3 information End \n");
6196
6197 /* use dumpsys media.camera as trigger to send update debug level event */
6198 mUpdateDebugLevel = true;
6199 pthread_mutex_unlock(&mMutex);
6200 return;
6201}
6202
6203/*===========================================================================
6204 * FUNCTION : flush
6205 *
6206 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6207 * conditionally restarts channels
6208 *
6209 * PARAMETERS :
6210 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006211 * @ stopChannelImmediately: stop the channel immediately. This should be used
6212 * when the device has encountered an error and MIPI
6213 * may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006214 *
6215 * RETURN :
6216 * 0 on success
6217 * Error code on failure
6218 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006219int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006220{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006221 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006222 int32_t rc = NO_ERROR;
6223
6224 LOGD("Unblocking Process Capture Request");
6225 pthread_mutex_lock(&mMutex);
6226 mFlush = true;
6227 pthread_mutex_unlock(&mMutex);
6228
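// Stop all channels first; buffers still held by the backend are returned to
// the framework as errors via notifyErrorForPendingRequests() below.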
6229 rc = stopAllChannels();
6230 // unlink of dualcam
6231 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006232 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6233 &m_pDualCamCmdPtr->bundle_info;
6234 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006235 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6236 pthread_mutex_lock(&gCamLock);
6237
6238 if (mIsMainCamera == 1) {
6239 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6240 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006241 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006242 // related session id should be session id of linked session
6243 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6244 } else {
6245 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6246 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006247 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006248 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6249 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006250 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006251 pthread_mutex_unlock(&gCamLock);
6252
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006253 rc = mCameraHandle->ops->set_dual_cam_cmd(
6254 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006255 if (rc < 0) {
6256 LOGE("Dualcam: Unlink failed, but still proceed to close");
6257 }
6258 }
6259
6260 if (rc < 0) {
6261 LOGE("stopAllChannels failed");
6262 return rc;
6263 }
6264 if (mChannelHandle) {
6265 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006266 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006267 }
6268
6269 // Reset bundle info
6270 rc = setBundleInfo();
6271 if (rc < 0) {
6272 LOGE("setBundleInfo failed %d", rc);
6273 return rc;
6274 }
6275
6276 // Mutex Lock
6277 pthread_mutex_lock(&mMutex);
6278
6279 // Unblock process_capture_request
6280 mPendingLiveRequest = 0;
6281 pthread_cond_signal(&mRequestCond);
6282
6283 rc = notifyErrorForPendingRequests();
6284 if (rc < 0) {
6285 LOGE("notifyErrorForPendingRequests failed");
6286 pthread_mutex_unlock(&mMutex);
6287 return rc;
6288 }
6289
6290 mFlush = false;
6291
6292 // Start the Streams/Channels
6293 if (restartChannels) {
6294 rc = startAllChannels();
6295 if (rc < 0) {
6296 LOGE("startAllChannels failed");
6297 pthread_mutex_unlock(&mMutex);
6298 return rc;
6299 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006300 if (mChannelHandle) {
6301 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006302 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006303 if (rc < 0) {
6304 LOGE("start_channel failed");
6305 pthread_mutex_unlock(&mMutex);
6306 return rc;
6307 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006308 }
6309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006310 pthread_mutex_unlock(&mMutex);
6311
6312 return 0;
6313}
6314
6315/*===========================================================================
6316 * FUNCTION : flushPerf
6317 *
6318 * DESCRIPTION: This is the performance-optimized version of flush that does
6319 * not use stream off; instead it flushes the backend pipeline
6320 *
6321 * PARAMETERS :
6322 *
6323 *
6324 * RETURN : 0 : success
6325 * -EINVAL: input is malformed (device is not valid)
6326 * -ENODEV: if the device has encountered a serious error
6327 *==========================================================================*/
6328int QCamera3HardwareInterface::flushPerf()
6329{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006330 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006331 int32_t rc = 0;
6332 struct timespec timeout;
6333 bool timed_wait = false;
6334
6335 pthread_mutex_lock(&mMutex);
6336 mFlushPerf = true;
6337 mPendingBuffersMap.numPendingBufsAtFlush =
6338 mPendingBuffersMap.get_num_overall_buffers();
6339 LOGD("Calling flush. Wait for %d buffers to return",
6340 mPendingBuffersMap.numPendingBufsAtFlush);
6341
6342 /* send the flush event to the backend */
6343 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6344 if (rc < 0) {
6345 LOGE("Error in flush: IOCTL failure");
6346 mFlushPerf = false;
6347 pthread_mutex_unlock(&mMutex);
6348 return -ENODEV;
6349 }
6350
6351 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6352 LOGD("No pending buffers in HAL, return flush");
6353 mFlushPerf = false;
6354 pthread_mutex_unlock(&mMutex);
6355 return rc;
6356 }
6357
6358 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006359 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006360 if (rc < 0) {
6361 LOGE("Error reading the monotonic clock, cannot use timed wait");
6362 } else {
6363 timeout.tv_sec += FLUSH_TIMEOUT;
6364 timed_wait = true;
6365 }
6366
6367 //Block on conditional variable
6368 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6369 LOGD("Waiting on mBuffersCond");
6370 if (!timed_wait) {
6371 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6372 if (rc != 0) {
6373 LOGE("pthread_cond_wait failed due to rc = %s",
6374 strerror(rc));
6375 break;
6376 }
6377 } else {
6378 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6379 if (rc != 0) {
6380 LOGE("pthread_cond_timedwait failed due to rc = %s",
6381 strerror(rc));
6382 break;
6383 }
6384 }
6385 }
6386 if (rc != 0) {
6387 mFlushPerf = false;
6388 pthread_mutex_unlock(&mMutex);
6389 return -ENODEV;
6390 }
6391
6392 LOGD("Received buffers, now safe to return them");
6393
6394 //make sure the channels handle flush
6395 //currently only required for the picture channel to release snapshot resources
6396 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6397 it != mStreamInfo.end(); it++) {
6398 QCamera3Channel *channel = (*it)->channel;
6399 if (channel) {
6400 rc = channel->flush();
6401 if (rc) {
6402 LOGE("Flushing the channels failed with error %d", rc);
6403 // Even though the channel flush failed, we need to continue and
6404 // return the buffers we hold to the framework; however, the return
6405 // value will be an error
6406 rc = -ENODEV;
6407 }
6408 }
6409 }
6410
6411 /* notify the frameworks and send errored results */
6412 rc = notifyErrorForPendingRequests();
6413 if (rc < 0) {
6414 LOGE("notifyErrorForPendingRequests failed");
6415 pthread_mutex_unlock(&mMutex);
6416 return rc;
6417 }
6418
6419 //unblock process_capture_request
6420 mPendingLiveRequest = 0;
6421 unblockRequestIfNecessary();
6422
6423 mFlushPerf = false;
6424 pthread_mutex_unlock(&mMutex);
6425 LOGD ("Flush Operation complete. rc = %d", rc);
6426 return rc;
6427}
6428
6429/*===========================================================================
6430 * FUNCTION : handleCameraDeviceError
6431 *
6432 * DESCRIPTION: This function calls internal flush and notifies the error to
6433 * framework and updates the state variable.
6434 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006435 * PARAMETERS :
6436 * @stopChannelImmediately : stop channels immediately without waiting for
6437 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006438 *
6439 * RETURN : NO_ERROR on Success
6440 * Error code on failure
6441 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006442int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006443{
6444 int32_t rc = NO_ERROR;
6445
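// Hold mFlushLock so this internal flush is serialized with other flush
// callers while the state transitions to DEINIT.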
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006446 {
6447 Mutex::Autolock lock(mFlushLock);
6448 pthread_mutex_lock(&mMutex);
6449 if (mState != ERROR) {
6450 //if mState != ERROR, nothing to be done
6451 pthread_mutex_unlock(&mMutex);
6452 return NO_ERROR;
6453 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006454 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006455
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006456 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006457 if (NO_ERROR != rc) {
6458 LOGE("internal flush to handle mState = ERROR failed");
6459 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006460
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006461 pthread_mutex_lock(&mMutex);
6462 mState = DEINIT;
6463 pthread_mutex_unlock(&mMutex);
6464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006465
6466 camera3_notify_msg_t notify_msg;
6467 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6468 notify_msg.type = CAMERA3_MSG_ERROR;
6469 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6470 notify_msg.message.error.error_stream = NULL;
6471 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006472 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006473
6474 return rc;
6475}
6476
6477/*===========================================================================
6478 * FUNCTION : captureResultCb
6479 *
6480 * DESCRIPTION: Callback handler for all capture results
6481 *              (streams as well as metadata)
6482 *
6483 * PARAMETERS :
6484 * @metadata : metadata information
6485 * @buffer : actual gralloc buffer to be returned to frameworks.
6486 * NULL if metadata.
6487 *
6488 * RETURN : NONE
6489 *==========================================================================*/
6490void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6491 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6492{
6493 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006494 pthread_mutex_lock(&mMutex);
6495 uint8_t batchSize = mBatchSize;
6496 pthread_mutex_unlock(&mMutex);
6497 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006498 handleBatchMetadata(metadata_buf,
6499 true /* free_and_bufdone_meta_buf */);
6500 } else { /* mBatchSize = 0 */
6501 hdrPlusPerfLock(metadata_buf);
6502 pthread_mutex_lock(&mMutex);
6503 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006504 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006505 true /* last urgent frame of batch metadata */,
6506 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006507 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006508 pthread_mutex_unlock(&mMutex);
6509 }
6510 } else if (isInputBuffer) {
6511 pthread_mutex_lock(&mMutex);
6512 handleInputBufferWithLock(frame_number);
6513 pthread_mutex_unlock(&mMutex);
6514 } else {
6515 pthread_mutex_lock(&mMutex);
6516 handleBufferWithLock(buffer, frame_number);
6517 pthread_mutex_unlock(&mMutex);
6518 }
6519 return;
6520}
6521
6522/*===========================================================================
6523 * FUNCTION : getReprocessibleOutputStreamId
6524 *
6525 * DESCRIPTION: Get source output stream id for the input reprocess stream
6526 * based on size and format, which would be the largest
6527 * output stream if an input stream exists.
6528 *
6529 * PARAMETERS :
6530 * @id : return the stream id if found
6531 *
6532 * RETURN : int32_t type of status
6533 * NO_ERROR -- success
6534 *              non-zero failure code
6535 *==========================================================================*/
6536int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6537{
6538    /* check if there is any output or bidirectional stream with the same size and format,
6539       and return that stream */
6540 if ((mInputStreamInfo.dim.width > 0) &&
6541 (mInputStreamInfo.dim.height > 0)) {
6542 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6543 it != mStreamInfo.end(); it++) {
6544
6545 camera3_stream_t *stream = (*it)->stream;
6546 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6547 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6548 (stream->format == mInputStreamInfo.format)) {
6549 // Usage flag for an input stream and the source output stream
6550 // may be different.
6551 LOGD("Found reprocessible output stream! %p", *it);
6552 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6553 stream->usage, mInputStreamInfo.usage);
6554
6555 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6556 if (channel != NULL && channel->mStreams[0]) {
6557 id = channel->mStreams[0]->getMyServerID();
6558 return NO_ERROR;
6559 }
6560 }
6561 }
6562 } else {
6563 LOGD("No input stream, so no reprocessible output stream");
6564 }
6565 return NAME_NOT_FOUND;
6566}
6567
6568/*===========================================================================
6569 * FUNCTION : lookupFwkName
6570 *
6571 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6572 *              make sure the parameter is correctly propagated
6573 *
6574 * PARAMETERS :
6575 * @arr : map between the two enums
6576 * @len : len of the map
6577 * @hal_name : name of the hal_parm to map
6578 *
6579 * RETURN : int type of status
6580 * fwk_name -- success
6581 *              non-zero failure code
6582 *==========================================================================*/
6583template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6584 size_t len, halType hal_name)
6585{
6586
6587 for (size_t i = 0; i < len; i++) {
6588 if (arr[i].hal_name == hal_name) {
6589 return arr[i].fwk_name;
6590 }
6591 }
6592
6593    /* Not being able to find a matching framework type is not necessarily
6594     * an error case. This happens when mm-camera supports more attributes
6595     * than the framework does */
6596 LOGH("Cannot find matching framework type");
6597 return NAME_NOT_FOUND;
6598}
6599
6600/*===========================================================================
6601 * FUNCTION : lookupHalName
6602 *
6603 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6604 *              make sure the parameter is correctly propagated
6605 *
6606 * PARAMETERS :
6607 * @arr : map between the two enums
6608 * @len : len of the map
6609 *   @fwk_name : name of the framework parameter to map
6610 *
6611 * RETURN : int32_t type of status
6612 * hal_name -- success
6613 *              non-zero failure code
6614 *==========================================================================*/
6615template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6616 size_t len, fwkType fwk_name)
6617{
6618 for (size_t i = 0; i < len; i++) {
6619 if (arr[i].fwk_name == fwk_name) {
6620 return arr[i].hal_name;
6621 }
6622 }
6623
6624 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6625 return NAME_NOT_FOUND;
6626}
6627
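// A minimal usage sketch for the two lookup helpers above. The map type and
// the entries below are illustrative assumptions; the real tables (for
// example FLASH_MODES_MAP used later in this file) are defined elsewhere in
// the HAL with the same fwk_name/hal_name fields.
//
//     struct ExampleMap { int fwk_name; int hal_name; };
//     static const ExampleMap kExampleMap[] = {
//         { 0 /* fwk OFF */, 10 /* hal OFF */ },
//         { 1 /* fwk ON  */, 11 /* hal ON  */ },
//     };
//
//     int hal = lookupHalName(kExampleMap, 2, 1);   // returns 11
//     int fwk = lookupFwkName(kExampleMap, 2, 11);  // returns 1
//     // Either helper returns NAME_NOT_FOUND when no entry matches.
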
6628/*===========================================================================
6629 * FUNCTION : lookupProp
6630 *
6631 * DESCRIPTION: lookup a value by its name
6632 *
6633 * PARAMETERS :
6634 * @arr : map between the two enums
6635 * @len : size of the map
6636 * @name : name to be looked up
6637 *
6638 * RETURN : Value if found
6639 * CAM_CDS_MODE_MAX if not found
6640 *==========================================================================*/
6641template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6642 size_t len, const char *name)
6643{
6644 if (name) {
6645 for (size_t i = 0; i < len; i++) {
6646 if (!strcmp(arr[i].desc, name)) {
6647 return arr[i].val;
6648 }
6649 }
6650 }
6651 return CAM_CDS_MODE_MAX;
6652}
6653
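// A minimal sketch of lookupProp() being driven by an Android system
// property. The property key, the table entries and the CAM_CDS_MODE_OFF /
// CAM_CDS_MODE_AUTO enumerators are assumptions for illustration; only
// CAM_CDS_MODE_MAX is taken from the helper above.
//
//     struct CdsPropMap { const char *desc; cam_cds_mode_type_t val; };
//     static const CdsPropMap kCdsMap[] = {
//         { "off",  CAM_CDS_MODE_OFF  },
//         { "auto", CAM_CDS_MODE_AUTO },
//     };
//
//     char prop[PROPERTY_VALUE_MAX];
//     property_get("persist.camera.CDS", prop, "auto");
//     cam_cds_mode_type_t mode =
//             lookupProp(kCdsMap, sizeof(kCdsMap) / sizeof(kCdsMap[0]), prop);
//     // mode falls back to CAM_CDS_MODE_MAX when the string is not in the table.
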
6654/*===========================================================================
6655 * FUNCTION   : translateFromHalMetadata
6656 *
6657 * DESCRIPTION: Translate metadata received from the HAL into the framework
 *              (camera_metadata_t) result format
 *
6658 * PARAMETERS :
6659 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006660 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006661 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006662 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6663 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006664 * @enableZsl : pointer to the ZSL enable flag for this result, may be NULL
 *
6665 * RETURN : camera_metadata_t*
6666 * metadata in a format specified by fwk
6667 *==========================================================================*/
6668camera_metadata_t*
6669QCamera3HardwareInterface::translateFromHalMetadata(
6670 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006671 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006672 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006673 bool lastMetadataInBatch,
6674 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006675{
6676 CameraMetadata camMetadata;
6677 camera_metadata_t *resultMetadata;
6678
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006679 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006680 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6681 * Timestamp is needed because it's used for shutter notify calculation.
6682 * */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006683 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006684 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006685 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006686 }
6687
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006688 if (pendingRequest.jpegMetadata.entryCount())
6689 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006690
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006691 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6692 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6693 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6694 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6695 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006696 if (mBatchSize == 0) {
6697 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006698 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006700
Samuel Ha68ba5172016-12-15 18:41:12 -08006701 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6702    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006703 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006704 // DevCamDebug metadata translateFromHalMetadata AF
6705 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6706 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6707 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6708 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6709 }
6710 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6711 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6712 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6713 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6714 }
6715 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6716 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6717 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6718 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6719 }
6720 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6721 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6722 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6723 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6724 }
6725 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6726 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6727 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6728 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6729 }
6730 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6731 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6732 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6733 *DevCamDebug_af_monitor_pdaf_target_pos;
6734 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6735 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6736 }
6737 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6738 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6739 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6740 *DevCamDebug_af_monitor_pdaf_confidence;
6741 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6742 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6743 }
6744 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6745 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6746 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6747 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6748 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6751 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6752 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6753 *DevCamDebug_af_monitor_tof_target_pos;
6754 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6755 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6756 }
6757 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6758 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6759 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6760 *DevCamDebug_af_monitor_tof_confidence;
6761 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6762 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6763 }
6764 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6765 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6766 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6767 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6768 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6769 }
6770 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6771 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6772 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6773 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6774 &fwk_DevCamDebug_af_monitor_type_select, 1);
6775 }
6776 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6777 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6778 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6779 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6780 &fwk_DevCamDebug_af_monitor_refocus, 1);
6781 }
6782 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6783 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6784 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6785 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6786 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6787 }
6788 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6789 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6790 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6791 *DevCamDebug_af_search_pdaf_target_pos;
6792 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6793 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6794 }
6795 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6796 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6797 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6798 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6799 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6800 }
6801 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6802 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6803 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6804 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6805 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6808 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6809 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6810 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6811 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6812 }
6813 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6814 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6815 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6816 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6817 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6818 }
6819 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6820 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6821 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6822 *DevCamDebug_af_search_tof_target_pos;
6823 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6824 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6825 }
6826 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6827 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6828 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6829 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6830 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6831 }
6832 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6833 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6834 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6835 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6836 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6837 }
6838 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6839 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6840 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6841 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6842 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6843 }
6844 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6845 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6846 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6847 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6848 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6849 }
6850 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6851 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6852 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6853 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6854 &fwk_DevCamDebug_af_search_type_select, 1);
6855 }
6856 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6857 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6858 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6859 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6860 &fwk_DevCamDebug_af_search_next_pos, 1);
6861 }
6862 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6863 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6864 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6865 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6866 &fwk_DevCamDebug_af_search_target_pos, 1);
6867 }
6868 // DevCamDebug metadata translateFromHalMetadata AEC
6869 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6870 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6871 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6872 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6873 }
6874 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6875 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6876 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6877 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6878 }
6879 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6880 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6881 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6882 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6883 }
6884 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6885 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6886 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6887 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6888 }
6889 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6890 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6891 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6892 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6893 }
6894 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6895 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6896 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6897 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6898 }
6899 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6900 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6901 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6902 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6903 }
6904 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6905 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6906 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6907 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6908 }
Samuel Ha34229982017-02-17 13:51:11 -08006909 // DevCamDebug metadata translateFromHalMetadata zzHDR
6910 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6911 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6912 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6913 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6914 }
6915 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6916 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006917 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006918 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6919 }
6920 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6921 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6922 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6923 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6924 }
6925 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6926 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006927 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006928 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6929 }
6930 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6931 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6932 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6933 *DevCamDebug_aec_hdr_sensitivity_ratio;
6934 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6935 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6936 }
6937 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6938 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6939 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6940 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6941 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6942 }
6943 // DevCamDebug metadata translateFromHalMetadata ADRC
6944 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6945 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6946 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6947 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6948 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6949 }
6950 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6951 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6952 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6953 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6954 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6955 }
6956 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6957 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6958 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6959 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6960 }
6961 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6962 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6963 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6964 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6965 }
6966 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6967 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6968 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6969 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6970 }
6971 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6972 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6973 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6974 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6975 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006976 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6977 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6978 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6979 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6980 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6981 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6982 }
6983 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6984 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6985 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6986 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6987 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6988 }
6989 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6990 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6991 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6992 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6993 &fwk_DevCamDebug_aec_subject_motion, 1);
6994 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006995 // DevCamDebug metadata translateFromHalMetadata AWB
6996 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6997 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6998 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6999 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7000 }
7001 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7002 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7003 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7004 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7005 }
7006 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7007 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7008 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7009 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7010 }
7011 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7012 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7013 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7014 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7015 }
7016 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7017 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7018 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7019 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7020 }
7021 }
7022 // atrace_end(ATRACE_TAG_ALWAYS);
7023
Thierry Strudel3d639192016-09-09 11:52:26 -07007024 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7025 int64_t fwk_frame_number = *frame_number;
7026 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7027 }
7028
7029 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7030 int32_t fps_range[2];
7031 fps_range[0] = (int32_t)float_range->min_fps;
7032 fps_range[1] = (int32_t)float_range->max_fps;
7033 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7034 fps_range, 2);
7035 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7036 fps_range[0], fps_range[1]);
7037 }
7038
7039 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7040 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7041 }
7042
7043 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7044 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7045 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7046 *sceneMode);
7047 if (NAME_NOT_FOUND != val) {
7048 uint8_t fwkSceneMode = (uint8_t)val;
7049 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7050 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7051 fwkSceneMode);
7052 }
7053 }
7054
7055 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7056 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7057 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7058 }
7059
7060 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7061 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7062 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7063 }
7064
7065 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7066 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7067 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7068 }
7069
7070 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7071 CAM_INTF_META_EDGE_MODE, metadata) {
7072 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7073 }
7074
7075 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7076 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7077 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7078 }
7079
7080 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7081 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7082 }
7083
7084 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7085 if (0 <= *flashState) {
7086 uint8_t fwk_flashState = (uint8_t) *flashState;
7087 if (!gCamCapability[mCameraId]->flash_available) {
7088 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7089 }
7090 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7091 }
7092 }
7093
7094 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7095 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7096 if (NAME_NOT_FOUND != val) {
7097 uint8_t fwk_flashMode = (uint8_t)val;
7098 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7099 }
7100 }
7101
7102 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7103 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7104 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7105 }
7106
7107 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7108 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7109 }
7110
7111 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7112 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7113 }
7114
7115 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7116 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7117 }
7118
7119 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7120 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7121 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7122 }
7123
7124 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7125 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7126 LOGD("fwk_videoStab = %d", fwk_videoStab);
7127 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7128 } else {
7129        // Regardless of whether video stabilization is supported or not, CTS expects the EIS
7130        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7131 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7132 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007133 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007134 }
7135
7136 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7137 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7138 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7139 }
7140
7141 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7142 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7143 }
7144
Thierry Strudel3d639192016-09-09 11:52:26 -07007145 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7146 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007147 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007148
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007149 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7150 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007151
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007152        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007153 blackLevelAppliedPattern->cam_black_level[0],
7154 blackLevelAppliedPattern->cam_black_level[1],
7155 blackLevelAppliedPattern->cam_black_level[2],
7156 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007157 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7158 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007159
7160#ifndef USE_HAL_3_3
7161 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307162        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007163        // depth space.
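        // Dropping from 14-bit to 10-bit scales values by 2^(14 - 10) = 16,
        // which is why each black level entry is divided by 16.0 below.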
Jason Lee4f3d96e2017-02-28 19:24:14 +05307164 fwk_blackLevelInd[0] /= 16.0;
7165 fwk_blackLevelInd[1] /= 16.0;
7166 fwk_blackLevelInd[2] /= 16.0;
7167 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007168 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7169 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007170#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007171 }
7172
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007173#ifndef USE_HAL_3_3
7174 // Fixed whitelevel is used by ISP/Sensor
7175 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7176 &gCamCapability[mCameraId]->white_level, 1);
7177#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007178
7179 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7180 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7181 int32_t scalerCropRegion[4];
7182 scalerCropRegion[0] = hScalerCropRegion->left;
7183 scalerCropRegion[1] = hScalerCropRegion->top;
7184 scalerCropRegion[2] = hScalerCropRegion->width;
7185 scalerCropRegion[3] = hScalerCropRegion->height;
7186
7187 // Adjust crop region from sensor output coordinate system to active
7188 // array coordinate system.
7189 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7190 scalerCropRegion[2], scalerCropRegion[3]);
7191
7192 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7193 }
7194
7195 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7196 LOGD("sensorExpTime = %lld", *sensorExpTime);
7197 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7198 }
7199
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007200 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7201 LOGD("expTimeBoost = %f", *expTimeBoost);
7202 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7203 }
7204
Thierry Strudel3d639192016-09-09 11:52:26 -07007205 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7206 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7207 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7208 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7209 }
7210
7211 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7212 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7213 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7214 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7215 sensorRollingShutterSkew, 1);
7216 }
7217
7218 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7219 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7220 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7221
7222 //calculate the noise profile based on sensitivity
7223 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7224 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7225 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
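        // The noise profile is reported as one (S, O) coefficient pair per
        // color channel, interleaved as [S, O, S, O, ...]. Per the
        // ANDROID_SENSOR_NOISE_PROFILE definition these model the pixel noise
        // as variance ~= S * signal + O, so the loop below repeats the same
        // pair for every channel.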
7226 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7227 noise_profile[i] = noise_profile_S;
7228 noise_profile[i+1] = noise_profile_O;
7229 }
7230 LOGD("noise model entry (S, O) is (%f, %f)",
7231 noise_profile_S, noise_profile_O);
7232 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7233 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7234 }
7235
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007236#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007237 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007238 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007239 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007240 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007241 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7242 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7243 }
7244 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007245#endif
7246
Thierry Strudel3d639192016-09-09 11:52:26 -07007247 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7248 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7249 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7250 }
7251
7252 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7253 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7254 *faceDetectMode);
7255 if (NAME_NOT_FOUND != val) {
7256 uint8_t fwk_faceDetectMode = (uint8_t)val;
7257 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7258
7259 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7260 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7261 CAM_INTF_META_FACE_DETECTION, metadata) {
7262 uint8_t numFaces = MIN(
7263 faceDetectionInfo->num_faces_detected, MAX_ROI);
7264 int32_t faceIds[MAX_ROI];
7265 uint8_t faceScores[MAX_ROI];
7266 int32_t faceRectangles[MAX_ROI * 4];
7267 int32_t faceLandmarks[MAX_ROI * 6];
7268 size_t j = 0, k = 0;
7269
7270 for (size_t i = 0; i < numFaces; i++) {
7271 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7272 // Adjust crop region from sensor output coordinate system to active
7273 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007274 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007275 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7276 rect.width, rect.height);
7277
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007278 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007279
Jason Lee8ce36fa2017-04-19 19:40:37 -07007280 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7281 "bottom-right (%d, %d)",
7282 faceDetectionInfo->frame_id, i,
7283 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7284 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7285
Thierry Strudel3d639192016-09-09 11:52:26 -07007286 j+= 4;
7287 }
7288 if (numFaces <= 0) {
7289 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7290 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7291 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7292 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7293 }
7294
7295 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7296 numFaces);
7297 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7298 faceRectangles, numFaces * 4U);
7299 if (fwk_faceDetectMode ==
7300 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7301 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7302 CAM_INTF_META_FACE_LANDMARK, metadata) {
7303
7304 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007305 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007306                            // Map the coordinates from the sensor output coordinate system to the active
7307 // array coordinate system.
7308 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007309 face_landmarks.left_eye_center.x,
7310 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007311 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007312 face_landmarks.right_eye_center.x,
7313 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007314 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007315 face_landmarks.mouth_center.x,
7316 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007317
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007318 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007319
7320 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7321 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7322 faceDetectionInfo->frame_id, i,
7323 faceLandmarks[k + LEFT_EYE_X],
7324 faceLandmarks[k + LEFT_EYE_Y],
7325 faceLandmarks[k + RIGHT_EYE_X],
7326 faceLandmarks[k + RIGHT_EYE_Y],
7327 faceLandmarks[k + MOUTH_X],
7328 faceLandmarks[k + MOUTH_Y]);
7329
Thierry Strudel04e026f2016-10-10 11:27:36 -07007330 k+= TOTAL_LANDMARK_INDICES;
7331 }
7332 } else {
7333 for (size_t i = 0; i < numFaces; i++) {
7334 setInvalidLandmarks(faceLandmarks+k);
7335 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007336 }
7337 }
7338
Jason Lee49619db2017-04-13 12:07:22 -07007339 for (size_t i = 0; i < numFaces; i++) {
7340 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7341
7342 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7343 faceDetectionInfo->frame_id, i, faceIds[i]);
7344 }
7345
Thierry Strudel3d639192016-09-09 11:52:26 -07007346 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7347 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7348 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007349 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007350 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7351 CAM_INTF_META_FACE_BLINK, metadata) {
7352 uint8_t detected[MAX_ROI];
7353 uint8_t degree[MAX_ROI * 2];
7354 for (size_t i = 0; i < numFaces; i++) {
7355 detected[i] = blinks->blink[i].blink_detected;
7356 degree[2 * i] = blinks->blink[i].left_blink;
7357 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007358
Jason Lee49619db2017-04-13 12:07:22 -07007359 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7360 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7361 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7362 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007363 }
7364 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7365 detected, numFaces);
7366 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7367 degree, numFaces * 2);
7368 }
7369 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7370 CAM_INTF_META_FACE_SMILE, metadata) {
7371 uint8_t degree[MAX_ROI];
7372 uint8_t confidence[MAX_ROI];
7373 for (size_t i = 0; i < numFaces; i++) {
7374 degree[i] = smiles->smile[i].smile_degree;
7375 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007376
Jason Lee49619db2017-04-13 12:07:22 -07007377 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7378 "smile_degree=%d, smile_score=%d",
7379 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007380 }
7381 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7382 degree, numFaces);
7383 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7384 confidence, numFaces);
7385 }
7386 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7387 CAM_INTF_META_FACE_GAZE, metadata) {
7388 int8_t angle[MAX_ROI];
7389 int32_t direction[MAX_ROI * 3];
7390 int8_t degree[MAX_ROI * 2];
7391 for (size_t i = 0; i < numFaces; i++) {
7392 angle[i] = gazes->gaze[i].gaze_angle;
7393 direction[3 * i] = gazes->gaze[i].updown_dir;
7394 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7395 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7396 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7397 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007398
7399 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7400 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7401 "left_right_gaze=%d, top_bottom_gaze=%d",
7402 faceDetectionInfo->frame_id, i, angle[i],
7403 direction[3 * i], direction[3 * i + 1],
7404 direction[3 * i + 2],
7405 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007406 }
7407 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7408 (uint8_t *)angle, numFaces);
7409 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7410 direction, numFaces * 3);
7411 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7412 (uint8_t *)degree, numFaces * 2);
7413 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007414 }
7415 }
7416 }
7417 }
7418
7419 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7420 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007421 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007422 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007423 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007424
Shuzhen Wang14415f52016-11-16 18:26:18 -08007425 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7426 histogramBins = *histBins;
7427 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7428 }
7429
7430 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007431 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7432 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007433 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007434
7435 switch (stats_data->type) {
7436 case CAM_HISTOGRAM_TYPE_BAYER:
7437 switch (stats_data->bayer_stats.data_type) {
7438 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007439 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7440 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007441 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007442 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7443 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007444 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007445 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7446 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007447 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007448 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007449 case CAM_STATS_CHANNEL_R:
7450 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007451 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7452 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007453 }
7454 break;
7455 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007456 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007457 break;
7458 }
7459
Shuzhen Wang14415f52016-11-16 18:26:18 -08007460 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007461 }
7462 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007463 }
7464
7465 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7466 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7467 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7468 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7469 }
7470
7471 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7472 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7473 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7474 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7475 }
7476
7477 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7478 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7479 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7480 CAM_MAX_SHADING_MAP_HEIGHT);
7481 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7482 CAM_MAX_SHADING_MAP_WIDTH);
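        // The shading map carries one gain factor per Bayer channel for every
        // grid cell, hence 4 * map_width * map_height float entries below.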
7483 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7484 lensShadingMap->lens_shading, 4U * map_width * map_height);
7485 }
7486
7487 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7488 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7489 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7490 }
7491
7492 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7493 //Populate CAM_INTF_META_TONEMAP_CURVES
7494 /* ch0 = G, ch 1 = B, ch 2 = R*/
7495 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7496 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7497 tonemap->tonemap_points_cnt,
7498 CAM_MAX_TONEMAP_CURVE_SIZE);
7499 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7500 }
7501
7502 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
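        // Each tonemap point is an (input, output) pair, so every per-channel
        // curve below carries tonemap_points_cnt * 2 float values.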
7503 &tonemap->curves[0].tonemap_points[0][0],
7504 tonemap->tonemap_points_cnt * 2);
7505
7506 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7507 &tonemap->curves[1].tonemap_points[0][0],
7508 tonemap->tonemap_points_cnt * 2);
7509
7510 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7511 &tonemap->curves[2].tonemap_points[0][0],
7512 tonemap->tonemap_points_cnt * 2);
7513 }
7514
7515 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7516 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7517 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7518 CC_GAIN_MAX);
7519 }
7520
7521 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7522 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7523 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7524 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7525 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7526 }
7527
7528 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7529 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7530 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7531 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7532 toneCurve->tonemap_points_cnt,
7533 CAM_MAX_TONEMAP_CURVE_SIZE);
7534 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7535 }
7536 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7537 (float*)toneCurve->curve.tonemap_points,
7538 toneCurve->tonemap_points_cnt * 2);
7539 }
7540
7541 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7542 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7543 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7544 predColorCorrectionGains->gains, 4);
7545 }
7546
7547 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7548 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7549 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7550 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7551 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7552 }
7553
7554 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7555 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7556 }
7557
7558 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7559 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7560 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7561 }
7562
7563 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7564 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7565 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7566 }
7567
7568 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7569 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7570 *effectMode);
7571 if (NAME_NOT_FOUND != val) {
7572 uint8_t fwk_effectMode = (uint8_t)val;
7573 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7574 }
7575 }
7576
7577 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7578 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7579 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7580 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7581 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7582 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7583 }
7584 int32_t fwk_testPatternData[4];
7585 fwk_testPatternData[0] = testPatternData->r;
7586 fwk_testPatternData[3] = testPatternData->b;
7587 switch (gCamCapability[mCameraId]->color_arrangement) {
7588 case CAM_FILTER_ARRANGEMENT_RGGB:
7589 case CAM_FILTER_ARRANGEMENT_GRBG:
7590 fwk_testPatternData[1] = testPatternData->gr;
7591 fwk_testPatternData[2] = testPatternData->gb;
7592 break;
7593 case CAM_FILTER_ARRANGEMENT_GBRG:
7594 case CAM_FILTER_ARRANGEMENT_BGGR:
7595 fwk_testPatternData[2] = testPatternData->gr;
7596 fwk_testPatternData[1] = testPatternData->gb;
7597 break;
7598 default:
7599 LOGE("color arrangement %d is not supported",
7600 gCamCapability[mCameraId]->color_arrangement);
7601 break;
7602 }
7603 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7604 }
7605
7606 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7607 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7608 }
7609
7610 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7611 String8 str((const char *)gps_methods);
7612 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7613 }
7614
7615 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7616 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7617 }
7618
7619 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7620 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7621 }
7622
7623 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7624 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7625 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7626 }
7627
7628 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7629 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7630 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7631 }
7632
7633 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7634 int32_t fwk_thumb_size[2];
7635 fwk_thumb_size[0] = thumb_size->width;
7636 fwk_thumb_size[1] = thumb_size->height;
7637 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7638 }
7639
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007640 // Skip reprocess metadata if there is no input stream.
7641 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7642 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7643 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7644 privateData,
7645 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7646 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007647 }
7648
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007649 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007650 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007651 meteringMode, 1);
7652 }
7653
Thierry Strudel54dc9782017-02-15 12:12:10 -08007654 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7655 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7656 LOGD("hdr_scene_data: %d %f\n",
7657 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7658 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7659 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7660 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7661 &isHdr, 1);
7662 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7663 &isHdrConfidence, 1);
7664 }
7665
7666
7667
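    // Pack the tuning metadata into a flat blob: a version word and five size
    // words (sensor/VFE/CPP/CAC/mod3), followed by the sensor, VFE, CPP and CAC
    // data segments, published through QCAMERA3_TUNING_META_DATA_BLOB.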
Thierry Strudel3d639192016-09-09 11:52:26 -07007668 if (metadata->is_tuning_params_valid) {
7669 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7670 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7671 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7672
7673
7674 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7675 sizeof(uint32_t));
7676 data += sizeof(uint32_t);
7677
7678 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7679 sizeof(uint32_t));
7680 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7681 data += sizeof(uint32_t);
7682
7683 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7684 sizeof(uint32_t));
7685 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7686 data += sizeof(uint32_t);
7687
7688 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7689 sizeof(uint32_t));
7690 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7691 data += sizeof(uint32_t);
7692
7693 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7694 sizeof(uint32_t));
7695 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7696 data += sizeof(uint32_t);
7697
7698 metadata->tuning_params.tuning_mod3_data_size = 0;
7699 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7700 sizeof(uint32_t));
7701 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7702 data += sizeof(uint32_t);
7703
7704 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7705 TUNING_SENSOR_DATA_MAX);
7706 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7707 count);
7708 data += count;
7709
7710 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7711 TUNING_VFE_DATA_MAX);
7712 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7713 count);
7714 data += count;
7715
7716 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7717 TUNING_CPP_DATA_MAX);
7718 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7719 count);
7720 data += count;
7721
7722 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7723 TUNING_CAC_DATA_MAX);
7724 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7725 count);
7726 data += count;
7727
7728 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7729 (int32_t *)(void *)tuning_meta_data_blob,
7730 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7731 }
7732
7733 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7734 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7735 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7736 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7737 NEUTRAL_COL_POINTS);
7738 }
7739
7740 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7741 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7742 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7743 }
7744
7745 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7746 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7747        // Adjust the AE region from the sensor output coordinate system to the
7748        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007749 cam_rect_t hAeRect = hAeRegions->rect;
7750 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7751 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007752
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007753 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007754 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7755 REGIONS_TUPLE_COUNT);
7756 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7757 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007758 hAeRect.left, hAeRect.top, hAeRect.width,
7759 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007760 }
7761
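    // AF state: if it was already delivered as an early partial result, do not
    // repeat it here; otherwise prefer the state captured when the AF trigger
    // was processed, falling back to the AF state in this metadata buffer.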
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007762 if (!pendingRequest.focusStateSent) {
7763 if (pendingRequest.focusStateValid) {
7764 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7765 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007766 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007767 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7768 uint8_t fwk_afState = (uint8_t) *afState;
7769 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7770 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7771 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007772 }
7773 }
7774
Thierry Strudel3d639192016-09-09 11:52:26 -07007775 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7776 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7777 }
7778
7779 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7780 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7781 }
7782
7783 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7784 uint8_t fwk_lensState = *lensState;
7785 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7786 }
7787
Thierry Strudel3d639192016-09-09 11:52:26 -07007788 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007789 uint32_t ab_mode = *hal_ab_mode;
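        // Map the HAL's auto-50Hz/auto-60Hz variants to plain AUTO before
        // looking up the framework antibanding value.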
7790 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7791 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7792 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7793 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007794 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007795 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007796 if (NAME_NOT_FOUND != val) {
7797 uint8_t fwk_ab_mode = (uint8_t)val;
7798 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7799 }
7800 }
7801
7802 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7803 int val = lookupFwkName(SCENE_MODES_MAP,
7804 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7805 if (NAME_NOT_FOUND != val) {
7806 uint8_t fwkBestshotMode = (uint8_t)val;
7807 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7808 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7809 } else {
7810 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7811 }
7812 }
7813
7814 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7815 uint8_t fwk_mode = (uint8_t) *mode;
7816 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7817 }
7818
7819    /* Constant metadata values to be updated */
7820 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7821 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7822
7823 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7824 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7825
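    // Hot pixel map mode is reported as OFF above, so publish an empty
    // hot pixel map entry (zero coordinates).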
7826 int32_t hotPixelMap[2];
7827 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7828
7829 // CDS
7830 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7831 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7832 }
7833
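    // For video HDR, IR and TNR below, mCurrFeatureState tracks the current
    // on/off state so that toggles are logged once per transition instead of
    // on every frame.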
Thierry Strudel04e026f2016-10-10 11:27:36 -07007834 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7835 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007836 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007837 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7838 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7839 } else {
7840 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7841 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007842
7843 if(fwk_hdr != curr_hdr_state) {
7844 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7845 if(fwk_hdr)
7846 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7847 else
7848 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7849 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007850 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7851 }
7852
Thierry Strudel54dc9782017-02-15 12:12:10 -08007853 //binning correction
7854 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7855 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7856 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7857 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7858 }
7859
Thierry Strudel04e026f2016-10-10 11:27:36 -07007860 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007861 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007862 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7863 int8_t is_ir_on = 0;
7864
7865 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7866 if(is_ir_on != curr_ir_state) {
7867 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7868 if(is_ir_on)
7869 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7870 else
7871 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7872 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007873 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007874 }
7875
Thierry Strudel269c81a2016-10-12 12:13:59 -07007876 // AEC SPEED
7877 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7878 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7879 }
7880
7881 // AWB SPEED
7882 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7883 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7884 }
7885
Thierry Strudel3d639192016-09-09 11:52:26 -07007886 // TNR
7887 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7888 uint8_t tnr_enable = tnr->denoise_enable;
7889 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007890 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7891 int8_t is_tnr_on = 0;
7892
7893 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7894 if(is_tnr_on != curr_tnr_state) {
7895 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7896 if(is_tnr_on)
7897 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7898 else
7899 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7900 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007901
7902 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7903 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7904 }
7905
7906 // Reprocess crop data
7907 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7908 uint8_t cnt = crop_data->num_of_streams;
7909 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7910            // mm-qcamera-daemon only posts crop_data for streams
7911            // not linked to pproc, so the absence of valid crop metadata
7912            // is not necessarily an error case.
7913 LOGD("No valid crop metadata entries");
7914 } else {
7915 uint32_t reproc_stream_id;
7916 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7917 LOGD("No reprocessible stream found, ignore crop data");
7918 } else {
7919 int rc = NO_ERROR;
7920 Vector<int32_t> roi_map;
7921 int32_t *crop = new int32_t[cnt*4];
7922 if (NULL == crop) {
7923 rc = NO_MEMORY;
7924 }
7925 if (NO_ERROR == rc) {
7926 int32_t streams_found = 0;
7927 for (size_t i = 0; i < cnt; i++) {
7928 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7929 if (pprocDone) {
7930 // HAL already does internal reprocessing,
7931 // either via reprocessing before JPEG encoding,
7932 // or offline postprocessing for pproc bypass case.
7933 crop[0] = 0;
7934 crop[1] = 0;
7935 crop[2] = mInputStreamInfo.dim.width;
7936 crop[3] = mInputStreamInfo.dim.height;
7937 } else {
7938 crop[0] = crop_data->crop_info[i].crop.left;
7939 crop[1] = crop_data->crop_info[i].crop.top;
7940 crop[2] = crop_data->crop_info[i].crop.width;
7941 crop[3] = crop_data->crop_info[i].crop.height;
7942 }
7943 roi_map.add(crop_data->crop_info[i].roi_map.left);
7944 roi_map.add(crop_data->crop_info[i].roi_map.top);
7945 roi_map.add(crop_data->crop_info[i].roi_map.width);
7946 roi_map.add(crop_data->crop_info[i].roi_map.height);
7947 streams_found++;
7948 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7949 crop[0], crop[1], crop[2], crop[3]);
7950 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7951 crop_data->crop_info[i].roi_map.left,
7952 crop_data->crop_info[i].roi_map.top,
7953 crop_data->crop_info[i].roi_map.width,
7954 crop_data->crop_info[i].roi_map.height);
7955 break;
7956
7957 }
7958 }
7959 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7960 &streams_found, 1);
7961 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7962 crop, (size_t)(streams_found * 4));
7963 if (roi_map.array()) {
7964 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7965 roi_map.array(), roi_map.size());
7966 }
7967 }
7968 if (crop) {
7969 delete [] crop;
7970 }
7971 }
7972 }
7973 }
7974
7975 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7976        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7977        // so hardcode the CAC result to OFF mode.
7978 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7979 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7980 } else {
7981 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7982 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7983 *cacMode);
7984 if (NAME_NOT_FOUND != val) {
7985 uint8_t resultCacMode = (uint8_t)val;
7986 // check whether CAC result from CB is equal to Framework set CAC mode
7987 // If not equal then set the CAC mode came in corresponding request
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007988 if (pendingRequest.fwkCacMode != resultCacMode) {
7989 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007990 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007991 //Check if CAC is disabled by property
7992 if (m_cacModeDisabled) {
7993 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7994 }
7995
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007996 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007997 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7998 } else {
7999 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8000 }
8001 }
8002 }
8003
8004 // Post blob of cam_cds_data through vendor tag.
8005 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8006 uint8_t cnt = cdsInfo->num_of_streams;
8007 cam_cds_data_t cdsDataOverride;
8008 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8009 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8010 cdsDataOverride.num_of_streams = 1;
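        // Report a single-stream CDS blob: only the CDS enable flag of the
        // reprocessible stream (if any) is copied into the override.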
8011 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8012 uint32_t reproc_stream_id;
8013 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8014 LOGD("No reprocessible stream found, ignore cds data");
8015 } else {
8016 for (size_t i = 0; i < cnt; i++) {
8017 if (cdsInfo->cds_info[i].stream_id ==
8018 reproc_stream_id) {
8019 cdsDataOverride.cds_info[0].cds_enable =
8020 cdsInfo->cds_info[i].cds_enable;
8021 break;
8022 }
8023 }
8024 }
8025 } else {
8026 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8027 }
8028 camMetadata.update(QCAMERA3_CDS_INFO,
8029 (uint8_t *)&cdsDataOverride,
8030 sizeof(cam_cds_data_t));
8031 }
8032
8033 // Ldaf calibration data
8034 if (!mLdafCalibExist) {
8035 IF_META_AVAILABLE(uint32_t, ldafCalib,
8036 CAM_INTF_META_LDAF_EXIF, metadata) {
8037 mLdafCalibExist = true;
8038 mLdafCalib[0] = ldafCalib[0];
8039 mLdafCalib[1] = ldafCalib[1];
8040 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8041 ldafCalib[0], ldafCalib[1]);
8042 }
8043 }
8044
Thierry Strudel54dc9782017-02-15 12:12:10 -08008045 // EXIF debug data through vendor tag
8046 /*
8047 * Mobicat Mask can assume 3 values:
8048 * 1 refers to Mobicat data,
8049 * 2 refers to Stats Debug and Exif Debug Data
8050 * 3 refers to Mobicat and Stats Debug Data
8051 * We want to make sure that we are sending Exif debug data
8052 * only when Mobicat Mask is 2.
8053 */
8054 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8055 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8056 (uint8_t *)(void *)mExifParams.debug_params,
8057 sizeof(mm_jpeg_debug_exif_params_t));
8058 }
8059
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008060 // Reprocess and DDM debug data through vendor tag
8061 cam_reprocess_info_t repro_info;
8062 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008063 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8064 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008065 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008066 }
8067 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8068 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008069 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008070 }
8071 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8072 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008073 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008074 }
8075 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8076 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008077 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008078 }
8079 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8080 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008081 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008082 }
8083 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008084 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008085 }
8086 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8087 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008088 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008089 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008090 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8091 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8092 }
8093 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8094 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8095 }
8096 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8097 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008098
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008099 // INSTANT AEC MODE
8100 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8101 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8102 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8103 }
8104
Shuzhen Wange763e802016-03-31 10:24:29 -07008105 // AF scene change
8106 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8107 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8108 }
8109
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008110 // Enable ZSL
8111 if (enableZsl != nullptr) {
8112 uint8_t value = *enableZsl ?
8113 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8114 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8115 }
8116
Xu Han821ea9c2017-05-23 09:00:40 -07008117 // OIS Data
8118 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8119 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8120 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8121 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8122 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8123 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8124 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8125 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8126 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8127 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8128 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008129 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8130 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8131 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8132 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008133 }
8134
Thierry Strudel3d639192016-09-09 11:52:26 -07008135 resultMetadata = camMetadata.release();
8136 return resultMetadata;
8137}
8138
8139/*===========================================================================
8140 * FUNCTION : saveExifParams
8141 *
8142 * DESCRIPTION: save 3A and stats EXIF debug parameters from the metadata callback
8143 *
8144 * PARAMETERS :
8145 * @metadata : metadata information from callback
8146 *
8147 * RETURN : none
8148 *
8149 *==========================================================================*/
8150void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8151{
8152 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8153 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8154 if (mExifParams.debug_params) {
8155 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8156 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8157 }
8158 }
8159 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8160 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8161 if (mExifParams.debug_params) {
8162 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8163 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8164 }
8165 }
8166 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8167 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8168 if (mExifParams.debug_params) {
8169 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8170 mExifParams.debug_params->af_debug_params_valid = TRUE;
8171 }
8172 }
8173 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8174 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8175 if (mExifParams.debug_params) {
8176 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8177 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8178 }
8179 }
8180 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8181 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8182 if (mExifParams.debug_params) {
8183 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8184 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8185 }
8186 }
8187 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8188 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8189 if (mExifParams.debug_params) {
8190 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8191 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8192 }
8193 }
8194 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8195 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8196 if (mExifParams.debug_params) {
8197 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8198 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8199 }
8200 }
8201 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8202 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8203 if (mExifParams.debug_params) {
8204 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8205 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8206 }
8207 }
8208}
8209
8210/*===========================================================================
8211 * FUNCTION : get3AExifParams
8212 *
8213 * DESCRIPTION: return the cached 3A EXIF parameters
8214 *
8215 * PARAMETERS : none
8216 *
8217 *
8218 * RETURN : mm_jpeg_exif_params_t
8219 *
8220 *==========================================================================*/
8221mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8222{
8223 return mExifParams;
8224}
8225
8226/*===========================================================================
8227 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8228 *
8229 * DESCRIPTION:
8230 *
8231 * PARAMETERS :
8232 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008233 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8234 * urgent metadata in a batch. Always true for
8235 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008236 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008237 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8238 * i.e. even though it doesn't map to a valid partial
8239 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008240 * RETURN : camera_metadata_t*
8241 * metadata in a format specified by fwk
8242 *==========================================================================*/
8243camera_metadata_t*
8244QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008245 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008246 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008247{
8248 CameraMetadata camMetadata;
8249 camera_metadata_t *resultMetadata;
8250
Shuzhen Wang485e2442017-08-02 12:21:08 -07008251 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008252 /* In batch mode, use empty metadata if this is not the last in batch
8253 */
8254 resultMetadata = allocate_camera_metadata(0, 0);
8255 return resultMetadata;
8256 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008257
8258 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8259 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8260 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8261 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8262 }
8263
8264 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8265 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8266 &aecTrigger->trigger, 1);
8267 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8268 &aecTrigger->trigger_id, 1);
8269 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8270 aecTrigger->trigger);
8271 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8272 aecTrigger->trigger_id);
8273 }
8274
8275 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8276 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8277 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8278 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8279 }
8280
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008281 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8282 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8283 if (NAME_NOT_FOUND != val) {
8284 uint8_t fwkAfMode = (uint8_t)val;
8285 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8286 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8287 } else {
8288 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8289 val);
8290 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008291 }
8292
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008293 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8294 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8295 af_trigger->trigger);
8296 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8297 af_trigger->trigger_id);
8298
8299 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8300 mAfTrigger = *af_trigger;
8301 uint32_t fwk_AfState = (uint32_t) *afState;
8302
8303 // If this is the result for a new trigger, check if there is new early
8304 // af state. If there is, use the last af state for all results
8305 // preceding current partial frame number.
8306 for (auto & pendingRequest : mPendingRequestsList) {
8307 if (pendingRequest.frame_number < frame_number) {
8308 pendingRequest.focusStateValid = true;
8309 pendingRequest.focusState = fwk_AfState;
8310 } else if (pendingRequest.frame_number == frame_number) {
8311 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8312 // Check if early AF state for trigger exists. If yes, send AF state as
8313 // partial result for better latency.
8314 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8315 pendingRequest.focusStateSent = true;
8316 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8317 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8318 frame_number, fwkEarlyAfState);
8319 }
8320 }
8321 }
8322 }
8323 }
8324 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8325 &mAfTrigger.trigger, 1);
8326 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8327
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008328 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8329 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008330 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008331 int32_t afRegions[REGIONS_TUPLE_COUNT];
8332        // Adjust the AF region from the sensor output coordinate system to the
8333        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008334 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8335 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008336
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008337 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008338 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8339 REGIONS_TUPLE_COUNT);
8340 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8341 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008342 hAfRect.left, hAfRect.top, hAfRect.width,
8343 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008344 }
8345
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008346 // AF region confidence
8347 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8348 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8349 }
8350
Thierry Strudel3d639192016-09-09 11:52:26 -07008351 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8352 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8353 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8354 if (NAME_NOT_FOUND != val) {
8355 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8356 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8357 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8358 } else {
8359 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8360 }
8361 }
8362
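    // Derive ANDROID_CONTROL_AE_MODE from the red-eye, LED flash and AEC modes:
    // red-eye reduction takes precedence, then flash auto/on, then plain AE
    // on/off/external-flash.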
8363 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8364 uint32_t aeMode = CAM_AE_MODE_MAX;
8365 int32_t flashMode = CAM_FLASH_MODE_MAX;
8366 int32_t redeye = -1;
8367 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8368 aeMode = *pAeMode;
8369 }
8370 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8371 flashMode = *pFlashMode;
8372 }
8373 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8374 redeye = *pRedeye;
8375 }
8376
8377 if (1 == redeye) {
8378 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8379 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8380 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8381 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8382 flashMode);
8383 if (NAME_NOT_FOUND != val) {
8384 fwk_aeMode = (uint8_t)val;
8385 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8386 } else {
8387 LOGE("Unsupported flash mode %d", flashMode);
8388 }
8389 } else if (aeMode == CAM_AE_MODE_ON) {
8390 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8391 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8392 } else if (aeMode == CAM_AE_MODE_OFF) {
8393 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8394 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008395 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8396 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8397 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008398 } else {
8399 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8400 "flashMode:%d, aeMode:%u!!!",
8401 redeye, flashMode, aeMode);
8402 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008403 if (mInstantAEC) {
8404 // Increment frame Idx count untill a bound reached for instant AEC.
8405 mInstantAecFrameIdxCount++;
8406 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8407 CAM_INTF_META_AEC_INFO, metadata) {
8408 LOGH("ae_params->settled = %d",ae_params->settled);
8409 // If AEC settled, or if number of frames reached bound value,
8410 // should reset instant AEC.
8411 if (ae_params->settled ||
8412 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8413 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8414 mInstantAEC = false;
8415 mResetInstantAEC = true;
8416 mInstantAecFrameIdxCount = 0;
8417 }
8418 }
8419 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008420 resultMetadata = camMetadata.release();
8421 return resultMetadata;
8422}
8423
8424/*===========================================================================
8425 * FUNCTION : dumpMetadataToFile
8426 *
8427 * DESCRIPTION: Dumps tuning metadata to file system
8428 *
8429 * PARAMETERS :
8430 * @meta : tuning metadata
8431 * @dumpFrameCount : current dump frame count
8432 * @enabled : Enable mask
8433 *
8434 *==========================================================================*/
8435void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8436 uint32_t &dumpFrameCount,
8437 bool enabled,
8438 const char *type,
8439 uint32_t frameNumber)
8440{
8441 //Some sanity checks
8442 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8443 LOGE("Tuning sensor data size bigger than expected %d: %d",
8444 meta.tuning_sensor_data_size,
8445 TUNING_SENSOR_DATA_MAX);
8446 return;
8447 }
8448
8449 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8450 LOGE("Tuning VFE data size bigger than expected %d: %d",
8451 meta.tuning_vfe_data_size,
8452 TUNING_VFE_DATA_MAX);
8453 return;
8454 }
8455
8456 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8457 LOGE("Tuning CPP data size bigger than expected %d: %d",
8458 meta.tuning_cpp_data_size,
8459 TUNING_CPP_DATA_MAX);
8460 return;
8461 }
8462
8463 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8464 LOGE("Tuning CAC data size bigger than expected %d: %d",
8465 meta.tuning_cac_data_size,
8466 TUNING_CAC_DATA_MAX);
8467 return;
8468 }
8469 //
8470
8471 if(enabled){
8472 char timeBuf[FILENAME_MAX];
8473 char buf[FILENAME_MAX];
8474 memset(buf, 0, sizeof(buf));
8475 memset(timeBuf, 0, sizeof(timeBuf));
8476 time_t current_time;
8477 struct tm * timeinfo;
8478 time (&current_time);
8479 timeinfo = localtime (&current_time);
8480 if (timeinfo != NULL) {
8481 strftime (timeBuf, sizeof(timeBuf),
8482 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8483 }
8484 String8 filePath(timeBuf);
8485 snprintf(buf,
8486 sizeof(buf),
8487 "%dm_%s_%d.bin",
8488 dumpFrameCount,
8489 type,
8490 frameNumber);
8491 filePath.append(buf);
8492 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8493 if (file_fd >= 0) {
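            // The dump mirrors the tuning blob layout: version word, five size
            // words (sensor/VFE/CPP/CAC/mod3), then the sensor, VFE, CPP and CAC
            // payloads read from their fixed offsets in meta.data.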
8494 ssize_t written_len = 0;
8495 meta.tuning_data_version = TUNING_DATA_VERSION;
8496 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8497 written_len += write(file_fd, data, sizeof(uint32_t));
8498 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8499 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8500 written_len += write(file_fd, data, sizeof(uint32_t));
8501 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8502 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8503 written_len += write(file_fd, data, sizeof(uint32_t));
8504 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8505 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8506 written_len += write(file_fd, data, sizeof(uint32_t));
8507 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8508 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8509 written_len += write(file_fd, data, sizeof(uint32_t));
8510 meta.tuning_mod3_data_size = 0;
8511 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8512 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8513 written_len += write(file_fd, data, sizeof(uint32_t));
8514 size_t total_size = meta.tuning_sensor_data_size;
8515 data = (void *)((uint8_t *)&meta.data);
8516 written_len += write(file_fd, data, total_size);
8517 total_size = meta.tuning_vfe_data_size;
8518 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8519 written_len += write(file_fd, data, total_size);
8520 total_size = meta.tuning_cpp_data_size;
8521 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8522 written_len += write(file_fd, data, total_size);
8523 total_size = meta.tuning_cac_data_size;
8524 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8525 written_len += write(file_fd, data, total_size);
8526 close(file_fd);
8527 }else {
8528 LOGE("fail to open file for metadata dumping");
8529 }
8530 }
8531}
8532
8533/*===========================================================================
8534 * FUNCTION : cleanAndSortStreamInfo
8535 *
8536 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8537 *              and sort them such that the raw stream is at the end of the list.
8538 *              This is a workaround for a camera daemon constraint.
8539 *
8540 * PARAMETERS : None
8541 *
8542 *==========================================================================*/
8543void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8544{
8545 List<stream_info_t *> newStreamInfo;
8546
8547 /*clean up invalid streams*/
8548 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8549 it != mStreamInfo.end();) {
8550 if(((*it)->status) == INVALID){
8551 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8552 delete channel;
8553 free(*it);
8554 it = mStreamInfo.erase(it);
8555 } else {
8556 it++;
8557 }
8558 }
8559
8560 // Move preview/video/callback/snapshot streams into newList
8561 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8562 it != mStreamInfo.end();) {
8563 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8564 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8565 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8566 newStreamInfo.push_back(*it);
8567 it = mStreamInfo.erase(it);
8568 } else
8569 it++;
8570 }
8571 // Move raw streams into newList
8572 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8573 it != mStreamInfo.end();) {
8574 newStreamInfo.push_back(*it);
8575 it = mStreamInfo.erase(it);
8576 }
8577
8578 mStreamInfo = newStreamInfo;
8579}
8580
8581/*===========================================================================
8582 * FUNCTION : extractJpegMetadata
8583 *
8584 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8585 *              JPEG metadata is cached in the HAL and returned as part of the capture
8586 *              result when metadata is returned from the camera daemon.
8587 *
8588 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8589 * @request: capture request
8590 *
8591 *==========================================================================*/
8592void QCamera3HardwareInterface::extractJpegMetadata(
8593 CameraMetadata& jpegMetadata,
8594 const camera3_capture_request_t *request)
8595{
8596 CameraMetadata frame_settings;
8597 frame_settings = request->settings;
8598
8599 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8600 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8601 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8602 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8603
8604 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8605 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8606 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8607 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8608
8609 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8610 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8611 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8612 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8613
8614 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8615 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8616 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8617 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8618
8619 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8620 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8621 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8622 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8623
8624 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8625 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8626 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8627 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8628
8629 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8630 int32_t thumbnail_size[2];
8631 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8632 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8633 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8634 int32_t orientation =
8635 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008636 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008637 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8638 int32_t temp;
8639 temp = thumbnail_size[0];
8640 thumbnail_size[0] = thumbnail_size[1];
8641 thumbnail_size[1] = temp;
8642 }
8643 }
8644 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8645 thumbnail_size,
8646 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8647 }
8648
8649}
8650
8651/*===========================================================================
8652 * FUNCTION : convertToRegions
8653 *
8654 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8655 *
8656 * PARAMETERS :
8657 * @rect : cam_rect_t struct to convert
8658 * @region : int32_t destination array
8659 * @weight : if we are converting from cam_area_t, weight is valid
8660 * else weight = -1
8661 *
8662 *==========================================================================*/
8663void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8664 int32_t *region, int weight)
8665{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008666 region[FACE_LEFT] = rect.left;
8667 region[FACE_TOP] = rect.top;
8668 region[FACE_RIGHT] = rect.left + rect.width;
8669 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008670 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008671 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008672 }
8673}
8674
8675/*===========================================================================
8676 * FUNCTION : convertFromRegions
8677 *
8678 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8679 *
8680 * PARAMETERS :
8681 *   @roi            : cam_area_t struct to populate
8682 *   @frame_settings : capture request settings containing the region tag
8683 *   @tag            : metadata tag whose value is [x_min, y_min, x_max, y_max, weight]
8685 *
8686 *==========================================================================*/
8687void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008688 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008689{
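    // Framework regions are packed as [x_min, y_min, x_max, y_max, weight].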
Thierry Strudel3d639192016-09-09 11:52:26 -07008690 int32_t x_min = frame_settings.find(tag).data.i32[0];
8691 int32_t y_min = frame_settings.find(tag).data.i32[1];
8692 int32_t x_max = frame_settings.find(tag).data.i32[2];
8693 int32_t y_max = frame_settings.find(tag).data.i32[3];
8694 roi.weight = frame_settings.find(tag).data.i32[4];
8695 roi.rect.left = x_min;
8696 roi.rect.top = y_min;
8697 roi.rect.width = x_max - x_min;
8698 roi.rect.height = y_max - y_min;
8699}
8700
8701/*===========================================================================
8702 * FUNCTION : resetIfNeededROI
8703 *
8704 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8705 * crop region
8706 *
8707 * PARAMETERS :
8708 * @roi : cam_area_t struct to resize
8709 * @scalerCropRegion : cam_crop_region_t region to compare against
8710 *
8711 *
8712 *==========================================================================*/
8713bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8714 const cam_crop_region_t* scalerCropRegion)
8715{
8716 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8717 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8718 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8719 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8720
8721    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8722     * Without this check, the calculations below that validate whether the ROI
8723     * is inside the scaler crop region would fail, leaving the ROI unreset and
8724     * causing the algorithm to keep using a stale ROI window.
8725     */
8726 if (roi->weight == 0) {
8727 return true;
8728 }
8729
8730 if ((roi_x_max < scalerCropRegion->left) ||
8731        // right edge of roi window is left of scaler crop's left edge
8732 (roi_y_max < scalerCropRegion->top) ||
8733        // bottom edge of roi window is above scaler crop's top edge
8734 (roi->rect.left > crop_x_max) ||
8735        // left edge of roi window is to the right of scaler crop's right edge
8736 (roi->rect.top > crop_y_max)){
8737        // top edge of roi window is below scaler crop's bottom edge
8738 return false;
8739 }
8740 if (roi->rect.left < scalerCropRegion->left) {
8741 roi->rect.left = scalerCropRegion->left;
8742 }
8743 if (roi->rect.top < scalerCropRegion->top) {
8744 roi->rect.top = scalerCropRegion->top;
8745 }
8746 if (roi_x_max > crop_x_max) {
8747 roi_x_max = crop_x_max;
8748 }
8749 if (roi_y_max > crop_y_max) {
8750 roi_y_max = crop_y_max;
8751 }
8752 roi->rect.width = roi_x_max - roi->rect.left;
8753 roi->rect.height = roi_y_max - roi->rect.top;
8754 return true;
8755}
8756
8757/*===========================================================================
8758 * FUNCTION : convertLandmarks
8759 *
8760 * DESCRIPTION: helper method to extract the landmarks from face detection info
8761 *
8762 * PARAMETERS :
8763 * @landmark_data : input landmark data to be converted
8764 * @landmarks : int32_t destination array
8765 *
8766 *
8767 *==========================================================================*/
8768void QCamera3HardwareInterface::convertLandmarks(
8769 cam_face_landmarks_info_t landmark_data,
8770 int32_t *landmarks)
8771{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008772 if (landmark_data.is_left_eye_valid) {
8773 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8774 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8775 } else {
8776 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8777 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8778 }
8779
8780 if (landmark_data.is_right_eye_valid) {
8781 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8782 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8783 } else {
8784 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8785 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8786 }
8787
8788 if (landmark_data.is_mouth_valid) {
8789 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8790 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8791 } else {
8792 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8793 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8794 }
8795}
8796
8797/*===========================================================================
8798 * FUNCTION : setInvalidLandmarks
8799 *
8800 * DESCRIPTION: helper method to set invalid landmarks
8801 *
8802 * PARAMETERS :
8803 * @landmarks : int32_t destination array
8804 *
8805 *
8806 *==========================================================================*/
8807void QCamera3HardwareInterface::setInvalidLandmarks(
8808 int32_t *landmarks)
8809{
8810 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8811 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8812 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8813 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8814 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8815 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008816}
8817
8818#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008819
8820/*===========================================================================
8821 * FUNCTION : getCapabilities
8822 *
8823 * DESCRIPTION: query camera capability from back-end
8824 *
8825 * PARAMETERS :
8826 * @ops : mm-interface ops structure
8827 * @cam_handle : camera handle for which we need capability
8828 *
8829 * RETURN : ptr type of capability structure
8830 * capability for success
8831 * NULL for failure
8832 *==========================================================================*/
8833cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8834 uint32_t cam_handle)
8835{
8836 int rc = NO_ERROR;
8837 QCamera3HeapMemory *capabilityHeap = NULL;
8838 cam_capability_t *cap_ptr = NULL;
8839
8840 if (ops == NULL) {
8841 LOGE("Invalid arguments");
8842 return NULL;
8843 }
8844
8845 capabilityHeap = new QCamera3HeapMemory(1);
8846 if (capabilityHeap == NULL) {
8847 LOGE("creation of capabilityHeap failed");
8848 return NULL;
8849 }
8850
8851 /* Allocate memory for capability buffer */
8852 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8853 if(rc != OK) {
8854 LOGE("No memory for cappability");
8855 goto allocate_failed;
8856 }
8857
8858 /* Map memory for capability buffer */
8859 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8860
8861 rc = ops->map_buf(cam_handle,
8862 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8863 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8864 if(rc < 0) {
8865 LOGE("failed to map capability buffer");
8866 rc = FAILED_TRANSACTION;
8867 goto map_failed;
8868 }
8869
8870 /* Query Capability */
8871 rc = ops->query_capability(cam_handle);
8872 if(rc < 0) {
8873 LOGE("failed to query capability");
8874 rc = FAILED_TRANSACTION;
8875 goto query_failed;
8876 }
8877
8878 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8879 if (cap_ptr == NULL) {
8880 LOGE("out of memory");
8881 rc = NO_MEMORY;
8882 goto query_failed;
8883 }
8884
8885 memset(cap_ptr, 0, sizeof(cam_capability_t));
8886 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8887
8888 int index;
8889 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8890 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8891 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8892 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8893 }
8894
8895query_failed:
8896 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8897map_failed:
8898 capabilityHeap->deallocate();
8899allocate_failed:
8900 delete capabilityHeap;
8901
8902 if (rc != NO_ERROR) {
8903 return NULL;
8904 } else {
8905 return cap_ptr;
8906 }
8907}
8908
Thierry Strudel3d639192016-09-09 11:52:26 -07008909/*===========================================================================
8910 * FUNCTION : initCapabilities
8911 *
8912 * DESCRIPTION: initialize camera capabilities in static data struct
8913 *
8914 * PARAMETERS :
8915 * @cameraId : camera Id
8916 *
8917 * RETURN : int32_t type of status
8918 * NO_ERROR -- success
8919 *              non-zero failure code
8920 *==========================================================================*/
8921int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8922{
8923 int rc = 0;
8924 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008925 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008926
8927 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8928 if (rc) {
8929 LOGE("camera_open failed. rc = %d", rc);
8930 goto open_failed;
8931 }
8932 if (!cameraHandle) {
8933 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8934 goto open_failed;
8935 }
8936
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008937 handle = get_main_camera_handle(cameraHandle->camera_handle);
8938 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8939 if (gCamCapability[cameraId] == NULL) {
8940 rc = FAILED_TRANSACTION;
8941 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008942 }
8943
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008944 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008945 if (is_dual_camera_by_idx(cameraId)) {
8946 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8947 gCamCapability[cameraId]->aux_cam_cap =
8948 getCapabilities(cameraHandle->ops, handle);
8949 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8950 rc = FAILED_TRANSACTION;
8951 free(gCamCapability[cameraId]);
8952 goto failed_op;
8953 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008954
8955 // Copy the main camera capability to main_cam_cap struct
8956 gCamCapability[cameraId]->main_cam_cap =
8957 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8958 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8959 LOGE("out of memory");
8960 rc = NO_MEMORY;
8961 goto failed_op;
8962 }
8963 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8964 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008965 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008966failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008967 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8968 cameraHandle = NULL;
8969open_failed:
8970 return rc;
8971}
8972
8973/*==========================================================================
8974 * FUNCTION : get3Aversion
8975 *
8976 * DESCRIPTION: get the Q3A S/W version
8977 *
8978 * PARAMETERS :
8979 * @sw_version: Reference of Q3A structure which will hold version info upon
8980 * return
8981 *
8982 * RETURN : None
8983 *
8984 *==========================================================================*/
8985void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8986{
8987 if(gCamCapability[mCameraId])
8988 sw_version = gCamCapability[mCameraId]->q3a_version;
8989 else
8990 LOGE("Capability structure NULL!");
8991}
8992
8993
8994/*===========================================================================
8995 * FUNCTION : initParameters
8996 *
8997 * DESCRIPTION: initialize camera parameters
8998 *
8999 * PARAMETERS :
9000 *
9001 * RETURN : int32_t type of status
9002 * NO_ERROR -- success
9003 *              non-zero failure code
9004 *==========================================================================*/
9005int QCamera3HardwareInterface::initParameters()
9006{
9007 int rc = 0;
9008
9009 //Allocate Set Param Buffer
9010 mParamHeap = new QCamera3HeapMemory(1);
9011 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9012 if(rc != OK) {
9013 rc = NO_MEMORY;
9014 LOGE("Failed to allocate SETPARM Heap memory");
9015 delete mParamHeap;
9016 mParamHeap = NULL;
9017 return rc;
9018 }
9019
9020 //Map memory for parameters buffer
9021 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9022 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9023 mParamHeap->getFd(0),
9024 sizeof(metadata_buffer_t),
9025 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9026 if(rc < 0) {
9027 LOGE("failed to map SETPARM buffer");
9028 rc = FAILED_TRANSACTION;
9029 mParamHeap->deallocate();
9030 delete mParamHeap;
9031 mParamHeap = NULL;
9032 return rc;
9033 }
9034
9035 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9036
9037 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9038 return rc;
9039}
9040
9041/*===========================================================================
9042 * FUNCTION : deinitParameters
9043 *
9044 * DESCRIPTION: de-initialize camera parameters
9045 *
9046 * PARAMETERS :
9047 *
9048 * RETURN : NONE
9049 *==========================================================================*/
9050void QCamera3HardwareInterface::deinitParameters()
9051{
9052 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9053 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9054
9055 mParamHeap->deallocate();
9056 delete mParamHeap;
9057 mParamHeap = NULL;
9058
9059 mParameters = NULL;
9060
9061 free(mPrevParameters);
9062 mPrevParameters = NULL;
9063}
9064
9065/*===========================================================================
9066 * FUNCTION : calcMaxJpegSize
9067 *
9068 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9069 *
9070 * PARAMETERS :
9071 *
9072 * RETURN : max_jpeg_size
9073 *==========================================================================*/
9074size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9075{
9076 size_t max_jpeg_size = 0;
9077 size_t temp_width, temp_height;
9078 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9079 MAX_SIZES_CNT);
9080 for (size_t i = 0; i < count; i++) {
9081 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9082 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9083 if (temp_width * temp_height > max_jpeg_size ) {
9084 max_jpeg_size = temp_width * temp_height;
9085 }
9086 }
9087 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9088 return max_jpeg_size;
9089}
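/*
 * Worked example of the bound above (illustrative only; the 4000x3000 size is a
 * hypothetical largest picture size, not a claim about any specific sensor):
 *   max_jpeg_size = 4000 * 3000 * 3/2 + sizeof(camera3_jpeg_blob_t)
 *                 = 18000000 + sizeof(camera3_jpeg_blob_t) bytes
 * i.e. roughly the uncompressed YUV 4:2:0 footprint of the largest frame plus the
 * JPEG blob transport header, a conservative upper bound for any JPEG output.
 */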
9090
9091/*===========================================================================
9092 * FUNCTION : getMaxRawSize
9093 *
9094 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9095 *
9096 * PARAMETERS :
9097 *
9098 * RETURN : Largest supported Raw Dimension
9099 *==========================================================================*/
9100cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9101{
9102 int max_width = 0;
9103 cam_dimension_t maxRawSize;
9104
9105 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9106 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9107 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9108 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9109 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9110 }
9111 }
9112 return maxRawSize;
9113}
9114
9115
9116/*===========================================================================
9117 * FUNCTION : calcMaxJpegDim
9118 *
9119 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9120 *
9121 * PARAMETERS :
9122 *
9123 * RETURN : max_jpeg_dim
9124 *==========================================================================*/
9125cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9126{
9127 cam_dimension_t max_jpeg_dim;
9128 cam_dimension_t curr_jpeg_dim;
9129 max_jpeg_dim.width = 0;
9130 max_jpeg_dim.height = 0;
9131 curr_jpeg_dim.width = 0;
9132 curr_jpeg_dim.height = 0;
9133 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9134 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9135 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9136 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9137 max_jpeg_dim.width * max_jpeg_dim.height ) {
9138 max_jpeg_dim.width = curr_jpeg_dim.width;
9139 max_jpeg_dim.height = curr_jpeg_dim.height;
9140 }
9141 }
9142 return max_jpeg_dim;
9143}
9144
9145/*===========================================================================
9146 * FUNCTION : addStreamConfig
9147 *
9148 * DESCRIPTION: adds the stream configuration to the array
9149 *
9150 * PARAMETERS :
9151 * @available_stream_configs : pointer to stream configuration array
9152 * @scalar_format : scalar format
9153 * @dim : configuration dimension
9154 * @config_type : input or output configuration type
9155 *
9156 * RETURN : NONE
9157 *==========================================================================*/
9158void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9159 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9160{
9161 available_stream_configs.add(scalar_format);
9162 available_stream_configs.add(dim.width);
9163 available_stream_configs.add(dim.height);
9164 available_stream_configs.add(config_type);
9165}
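/*
 * Usage sketch (values hypothetical): each call appends one flat 4-tuple, which is
 * the packed layout ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS expects. For a
 * 4000x3000 cam_dimension_t named dim,
 *   addStreamConfig(configs, HAL_PIXEL_FORMAT_BLOB, dim,
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 * appends {HAL_PIXEL_FORMAT_BLOB, 4000, 3000, OUTPUT} to configs.
 */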
9166
9167/*===========================================================================
9168 * FUNCTION   : supportBurstCapture
9169 *
9170 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9171 *
9172 * PARAMETERS :
9173 * @cameraId : camera Id
9174 *
9175 * RETURN : true if camera supports BURST_CAPTURE
9176 * false otherwise
9177 *==========================================================================*/
9178bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9179{
9180 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9181 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9182 const int32_t highResWidth = 3264;
9183 const int32_t highResHeight = 2448;
9184
9185 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9186 // Maximum resolution images cannot be captured at >= 10fps
9187 // -> not supporting BURST_CAPTURE
9188 return false;
9189 }
9190
9191 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9192 // Maximum resolution images can be captured at >= 20fps
9193 // --> supporting BURST_CAPTURE
9194 return true;
9195 }
9196
9197 // Find the smallest highRes resolution, or largest resolution if there is none
9198 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9199 MAX_SIZES_CNT);
9200 size_t highRes = 0;
9201 while ((highRes + 1 < totalCnt) &&
9202 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9203 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9204 highResWidth * highResHeight)) {
9205 highRes++;
9206 }
9207 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9208 return true;
9209 } else {
9210 return false;
9211 }
9212}
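/*
 * Worked example of the checks above (durations are hypothetical): frame rate is
 * 1e9 / picture_min_duration, so the 50000000 ns bound corresponds to 20 fps and
 * the 100000000 ns bound to 10 fps. A camera whose full-resolution min duration is
 * 80000000 ns (12.5 fps) passes the 10 fps bound but misses the 20 fps one, so the
 * loop then locates the smallest size still >= 3264x2448 (~8MP) and BURST_CAPTURE
 * is advertised only if that size can sustain 20 fps.
 */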
9213
9214/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009215 * FUNCTION : getPDStatIndex
9216 *
9217 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9218 *
9219 * PARAMETERS :
9220 * @caps : camera capabilities
9221 *
9222 * RETURN : int32_t type
9223 * non-negative - on success
9224 * -1 - on failure
9225 *==========================================================================*/
9226int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9227 if (nullptr == caps) {
9228 return -1;
9229 }
9230
9231 uint32_t metaRawCount = caps->meta_raw_channel_count;
9232 int32_t ret = -1;
9233 for (size_t i = 0; i < metaRawCount; i++) {
9234 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9235 ret = i;
9236 break;
9237 }
9238 }
9239
9240 return ret;
9241}
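/*
 * Example (hypothetical capability layout): with meta_raw_channel_count = 2 where
 * only the second channel's sub_fmt is CAM_FORMAT_SUBTYPE_PDAF_STATS, the loop
 * above returns index 1; if no channel carries PDAF stats it returns -1 and the
 * depth/PD static tags below are simply not advertised.
 */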
9242
9243/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009244 * FUNCTION : initStaticMetadata
9245 *
9246 * DESCRIPTION: initialize the static metadata
9247 *
9248 * PARAMETERS :
9249 * @cameraId : camera Id
9250 *
9251 * RETURN : int32_t type of status
9252 * 0 -- success
9253 * non-zero failure code
9254 *==========================================================================*/
9255int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9256{
9257 int rc = 0;
9258 CameraMetadata staticInfo;
9259 size_t count = 0;
9260 bool limitedDevice = false;
9261 char prop[PROPERTY_VALUE_MAX];
9262 bool supportBurst = false;
9263
9264 supportBurst = supportBurstCapture(cameraId);
9265
9266    /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9267     * guaranteed, or if the min fps at max resolution is less than 20 fps, the
9268     * device is advertised as LIMITED */
9269 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9270 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9271 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9272 !supportBurst;
9273
9274 uint8_t supportedHwLvl = limitedDevice ?
9275 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009276#ifndef USE_HAL_3_3
9277 // LEVEL_3 - This device will support level 3.
9278 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9279#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009280 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009281#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009282
9283 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9284 &supportedHwLvl, 1);
9285
9286 bool facingBack = false;
9287 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9288 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9289 facingBack = true;
9290 }
9291 /*HAL 3 only*/
9292 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9293 &gCamCapability[cameraId]->min_focus_distance, 1);
9294
9295 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9296 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9297
9298    /* should be advertising the full list of focal lengths, but the sensor doesn't provide that info now */
9299 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9300 &gCamCapability[cameraId]->focal_length,
9301 1);
9302
9303 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9304 gCamCapability[cameraId]->apertures,
9305 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9306
9307 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9308 gCamCapability[cameraId]->filter_densities,
9309 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9310
9311
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009312 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9313 size_t mode_count =
9314 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9315 for (size_t i = 0; i < mode_count; i++) {
9316 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9317 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009318 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009319 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009320
9321 int32_t lens_shading_map_size[] = {
9322 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9323 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9324 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9325 lens_shading_map_size,
9326 sizeof(lens_shading_map_size)/sizeof(int32_t));
9327
9328 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9329 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9330
9331 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9332 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9333
9334 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9335 &gCamCapability[cameraId]->max_frame_duration, 1);
9336
9337 camera_metadata_rational baseGainFactor = {
9338 gCamCapability[cameraId]->base_gain_factor.numerator,
9339 gCamCapability[cameraId]->base_gain_factor.denominator};
9340 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9341 &baseGainFactor, 1);
9342
9343 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9344 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9345
9346 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9347 gCamCapability[cameraId]->pixel_array_size.height};
9348 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9349 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9350
9351 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9352 gCamCapability[cameraId]->active_array_size.top,
9353 gCamCapability[cameraId]->active_array_size.width,
9354 gCamCapability[cameraId]->active_array_size.height};
9355 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9356 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9357
9358 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9359 &gCamCapability[cameraId]->white_level, 1);
9360
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009361 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9362 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9363 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009364 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009365 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009366
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009367#ifndef USE_HAL_3_3
9368 bool hasBlackRegions = false;
9369 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9370 LOGW("black_region_count: %d is bounded to %d",
9371 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9372 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9373 }
9374 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9375 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9376 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9377 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9378 }
9379 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9380 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9381 hasBlackRegions = true;
9382 }
9383#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009384 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9385 &gCamCapability[cameraId]->flash_charge_duration, 1);
9386
9387 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9388 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9389
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009390 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9391 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9392 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009393 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9394 &timestampSource, 1);
9395
Thierry Strudel54dc9782017-02-15 12:12:10 -08009396 //update histogram vendor data
9397 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009398 &gCamCapability[cameraId]->histogram_size, 1);
9399
Thierry Strudel54dc9782017-02-15 12:12:10 -08009400 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009401 &gCamCapability[cameraId]->max_histogram_count, 1);
9402
Shuzhen Wang14415f52016-11-16 18:26:18 -08009403 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9404    //so that the app can request fewer bins than the maximum supported.
9405 std::vector<int32_t> histBins;
9406 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9407 histBins.push_back(maxHistBins);
9408 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9409 (maxHistBins & 0x1) == 0) {
9410 histBins.push_back(maxHistBins >> 1);
9411 maxHistBins >>= 1;
9412 }
9413 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9414 histBins.data(), histBins.size());
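    /*
     * Example of the halving above (hypothetical values): if max_histogram_count
     * were 256 and MIN_CAM_HISTOGRAM_STATS_SIZE were 32, histBins would come out
     * as {256, 128, 64, 32}; halving stops once the next value would drop below
     * the minimum supported stats size or the current value becomes odd.
     */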
9415
Thierry Strudel3d639192016-09-09 11:52:26 -07009416 int32_t sharpness_map_size[] = {
9417 gCamCapability[cameraId]->sharpness_map_size.width,
9418 gCamCapability[cameraId]->sharpness_map_size.height};
9419
9420 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9421 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9422
9423 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9424 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9425
Emilian Peev0f3c3162017-03-15 12:57:46 +00009426 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9427 if (0 <= indexPD) {
9428 // Advertise PD stats data as part of the Depth capabilities
9429 int32_t depthWidth =
9430 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9431 int32_t depthHeight =
9432 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009433 int32_t depthStride =
9434 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009435 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9436 assert(0 < depthSamplesCount);
9437 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9438 &depthSamplesCount, 1);
9439
9440 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9441 depthHeight,
9442 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9443 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9444 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9445 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9446 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9447
9448 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9449 depthHeight, 33333333,
9450 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9451 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9452 depthMinDuration,
9453 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9454
9455 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9456 depthHeight, 0,
9457 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9458 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9459 depthStallDuration,
9460 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9461
9462 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9463 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009464
9465 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9466 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9467 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009468 }
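    /*
     * Worked example for the PD depth tags above (raw size is hypothetical): for a
     * 640x480 PDAF stats buffer, depthStride = 640 * 2 = 1280 bytes and
     * depthSamplesCount = (640 * 480 * 2) / 16 = 38400, which is advertised as the
     * "width" of the 1-line HAL_PIXEL_FORMAT_BLOB depth configuration above.
     */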
9469
Thierry Strudel3d639192016-09-09 11:52:26 -07009470 int32_t scalar_formats[] = {
9471 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9472 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9473 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9474 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9475 HAL_PIXEL_FORMAT_RAW10,
9476 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009477 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9478 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9479 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009480
9481 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9482 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9483 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9484 count, MAX_SIZES_CNT, available_processed_sizes);
9485 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9486 available_processed_sizes, count * 2);
9487
9488 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9489 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9490 makeTable(gCamCapability[cameraId]->raw_dim,
9491 count, MAX_SIZES_CNT, available_raw_sizes);
9492 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9493 available_raw_sizes, count * 2);
9494
9495 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9496 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9497 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9498 count, MAX_SIZES_CNT, available_fps_ranges);
9499 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9500 available_fps_ranges, count * 2);
9501
9502 camera_metadata_rational exposureCompensationStep = {
9503 gCamCapability[cameraId]->exp_compensation_step.numerator,
9504 gCamCapability[cameraId]->exp_compensation_step.denominator};
9505 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9506 &exposureCompensationStep, 1);
9507
9508 Vector<uint8_t> availableVstabModes;
9509 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9510 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009511 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009512 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009513 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009514 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009515 count = IS_TYPE_MAX;
9516 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9517 for (size_t i = 0; i < count; i++) {
9518 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9519 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9520 eisSupported = true;
9521 break;
9522 }
9523 }
9524 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009525 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9526 }
9527 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9528 availableVstabModes.array(), availableVstabModes.size());
9529
9530 /*HAL 1 and HAL 3 common*/
9531 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9532 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9533 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009534 // Cap the max zoom to the max preferred value
9535 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009536 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9537 &maxZoom, 1);
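    /*
     * Example (zoom table values are hypothetical): if the last zoom_ratio_tbl
     * entry were 600 (6.0x in 1/100 steps), maxZoomStep / minZoomStep = 600 / 100
     * = 6, which is then capped at MAX_PREFERRED_ZOOM_RATIO. Note that the ratio
     * is computed with integer division before being stored in the float.
     */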
9538
9539 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9540 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9541
9542 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9543 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9544 max3aRegions[2] = 0; /* AF not supported */
9545 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9546 max3aRegions, 3);
9547
9548 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9549 memset(prop, 0, sizeof(prop));
9550 property_get("persist.camera.facedetect", prop, "1");
9551 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9552 LOGD("Support face detection mode: %d",
9553 supportedFaceDetectMode);
9554
9555 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009556    /* supported mode should be OFF if the max number of faces is 0 */
9557 if (maxFaces <= 0) {
9558 supportedFaceDetectMode = 0;
9559 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009560 Vector<uint8_t> availableFaceDetectModes;
9561 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9562 if (supportedFaceDetectMode == 1) {
9563 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9564 } else if (supportedFaceDetectMode == 2) {
9565 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9566 } else if (supportedFaceDetectMode == 3) {
9567 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9568 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9569 } else {
9570 maxFaces = 0;
9571 }
9572 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9573 availableFaceDetectModes.array(),
9574 availableFaceDetectModes.size());
9575 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9576 (int32_t *)&maxFaces, 1);
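    /*
     * Example of the property handling above: setting persist.camera.facedetect to
     * "3" advertises OFF, SIMPLE and FULL; "1" (the default) advertises OFF and
     * SIMPLE; "0", an unrecognized value, or a non-positive max_num_roi leaves only
     * OFF and forces the advertised max face count to 0.
     */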
Thierry Strudel54dc9782017-02-15 12:12:10 -08009577 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9578 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9579 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009580
9581 int32_t exposureCompensationRange[] = {
9582 gCamCapability[cameraId]->exposure_compensation_min,
9583 gCamCapability[cameraId]->exposure_compensation_max};
9584 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9585 exposureCompensationRange,
9586 sizeof(exposureCompensationRange)/sizeof(int32_t));
9587
9588 uint8_t lensFacing = (facingBack) ?
9589 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9590 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9591
9592 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9593 available_thumbnail_sizes,
9594 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9595
9596 /*all sizes will be clubbed into this tag*/
9597 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9598 /*android.scaler.availableStreamConfigurations*/
9599 Vector<int32_t> available_stream_configs;
9600 cam_dimension_t active_array_dim;
9601 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9602 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009603
9604    /* Advertise the list of supported input dimensions based on the property below.
9605    By default all sizes up to 5MP will be advertised.
9606    Note that the setprop resolution format should be WxH,
9607    e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9608    To list all supported sizes, the property needs to be set to "0x0" */
9609 cam_dimension_t minInputSize = {2592,1944}; //5MP
9610 memset(prop, 0, sizeof(prop));
9611 property_get("persist.camera.input.minsize", prop, "2592x1944");
9612 if (strlen(prop) > 0) {
9613 char *saveptr = NULL;
9614 char *token = strtok_r(prop, "x", &saveptr);
9615 if (token != NULL) {
9616 minInputSize.width = atoi(token);
9617 }
9618 token = strtok_r(NULL, "x", &saveptr);
9619 if (token != NULL) {
9620 minInputSize.height = atoi(token);
9621 }
9622 }
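    /*
     * Example of the parsing above: with persist.camera.input.minsize set to
     * "1280x720", strtok_r() splits on 'x' and minInputSize becomes {1280, 720};
     * setting it to "0x0" yields {0, 0}, so every picture size passes the
     * minimum-size check below and is also advertised as an input (reprocess)
     * configuration.
     */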
9623
Thierry Strudel3d639192016-09-09 11:52:26 -07009624 /* Add input/output stream configurations for each scalar formats*/
9625 for (size_t j = 0; j < scalar_formats_count; j++) {
9626 switch (scalar_formats[j]) {
9627 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9628 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9629 case HAL_PIXEL_FORMAT_RAW10:
9630 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9631 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9632 addStreamConfig(available_stream_configs, scalar_formats[j],
9633 gCamCapability[cameraId]->raw_dim[i],
9634 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9635 }
9636 break;
9637 case HAL_PIXEL_FORMAT_BLOB:
9638 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9639 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9640 addStreamConfig(available_stream_configs, scalar_formats[j],
9641 gCamCapability[cameraId]->picture_sizes_tbl[i],
9642 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9643 }
9644 break;
9645 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9646 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9647 default:
9648 cam_dimension_t largest_picture_size;
9649 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9650 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9651 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9652 addStreamConfig(available_stream_configs, scalar_formats[j],
9653 gCamCapability[cameraId]->picture_sizes_tbl[i],
9654 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009655                /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009656 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9657 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009658 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9659 >= minInputSize.width) || (gCamCapability[cameraId]->
9660 picture_sizes_tbl[i].height >= minInputSize.height)) {
9661 addStreamConfig(available_stream_configs, scalar_formats[j],
9662 gCamCapability[cameraId]->picture_sizes_tbl[i],
9663 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9664 }
9665 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009666 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009667
Thierry Strudel3d639192016-09-09 11:52:26 -07009668 break;
9669 }
9670 }
9671
9672 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9673 available_stream_configs.array(), available_stream_configs.size());
9674 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9675 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9676
9677 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9678 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9679
9680 /* android.scaler.availableMinFrameDurations */
9681 Vector<int64_t> available_min_durations;
9682 for (size_t j = 0; j < scalar_formats_count; j++) {
9683 switch (scalar_formats[j]) {
9684 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9685 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9686 case HAL_PIXEL_FORMAT_RAW10:
9687 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9688 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9689 available_min_durations.add(scalar_formats[j]);
9690 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9691 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9692 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9693 }
9694 break;
9695 default:
9696 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9697 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9698 available_min_durations.add(scalar_formats[j]);
9699 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9700 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9701 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9702 }
9703 break;
9704 }
9705 }
9706 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9707 available_min_durations.array(), available_min_durations.size());
9708
9709 Vector<int32_t> available_hfr_configs;
9710 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9711 int32_t fps = 0;
9712 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9713 case CAM_HFR_MODE_60FPS:
9714 fps = 60;
9715 break;
9716 case CAM_HFR_MODE_90FPS:
9717 fps = 90;
9718 break;
9719 case CAM_HFR_MODE_120FPS:
9720 fps = 120;
9721 break;
9722 case CAM_HFR_MODE_150FPS:
9723 fps = 150;
9724 break;
9725 case CAM_HFR_MODE_180FPS:
9726 fps = 180;
9727 break;
9728 case CAM_HFR_MODE_210FPS:
9729 fps = 210;
9730 break;
9731 case CAM_HFR_MODE_240FPS:
9732 fps = 240;
9733 break;
9734 case CAM_HFR_MODE_480FPS:
9735 fps = 480;
9736 break;
9737 case CAM_HFR_MODE_OFF:
9738 case CAM_HFR_MODE_MAX:
9739 default:
9740 break;
9741 }
9742
9743 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9744 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9745 /* For each HFR frame rate, need to advertise one variable fps range
9746 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9747 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9748 * set by the app. When video recording is started, [120, 120] is
9749 * set. This way sensor configuration does not change when recording
9750 * is started */
9751
9752 /* (width, height, fps_min, fps_max, batch_size_max) */
9753 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9754 j < MAX_SIZES_CNT; j++) {
9755 available_hfr_configs.add(
9756 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9757 available_hfr_configs.add(
9758 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9759 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9760 available_hfr_configs.add(fps);
9761 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9762
9763 /* (width, height, fps_min, fps_max, batch_size_max) */
9764 available_hfr_configs.add(
9765 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9766 available_hfr_configs.add(
9767 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9768 available_hfr_configs.add(fps);
9769 available_hfr_configs.add(fps);
9770 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9771 }
9772 }
9773 }
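    /*
     * Example of the tuples generated above (dimension is hypothetical; this
     * assumes PREVIEW_FPS_FOR_HFR is 30, matching the [30, 120] example in the
     * comment above): a 1920x1080 entry in the HFR table at 120 fps adds
     *   (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4)
     * where 4 = 120 / PREVIEW_FPS_FOR_HFR is the maximum batch size.
     */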
9774 //Advertise HFR capability only if the property is set
9775 memset(prop, 0, sizeof(prop));
9776 property_get("persist.camera.hal3hfr.enable", prop, "1");
9777 uint8_t hfrEnable = (uint8_t)atoi(prop);
9778
9779 if(hfrEnable && available_hfr_configs.array()) {
9780 staticInfo.update(
9781 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9782 available_hfr_configs.array(), available_hfr_configs.size());
9783 }
9784
9785 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9786 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9787 &max_jpeg_size, 1);
9788
9789 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9790 size_t size = 0;
9791 count = CAM_EFFECT_MODE_MAX;
9792 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9793 for (size_t i = 0; i < count; i++) {
9794 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9795 gCamCapability[cameraId]->supported_effects[i]);
9796 if (NAME_NOT_FOUND != val) {
9797 avail_effects[size] = (uint8_t)val;
9798 size++;
9799 }
9800 }
9801 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9802 avail_effects,
9803 size);
9804
9805 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9806 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9807 size_t supported_scene_modes_cnt = 0;
9808 count = CAM_SCENE_MODE_MAX;
9809 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9810 for (size_t i = 0; i < count; i++) {
9811 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9812 CAM_SCENE_MODE_OFF) {
9813 int val = lookupFwkName(SCENE_MODES_MAP,
9814 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9815 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009816
Thierry Strudel3d639192016-09-09 11:52:26 -07009817 if (NAME_NOT_FOUND != val) {
9818 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9819 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9820 supported_scene_modes_cnt++;
9821 }
9822 }
9823 }
9824 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9825 avail_scene_modes,
9826 supported_scene_modes_cnt);
9827
9828 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9829 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9830 supported_scene_modes_cnt,
9831 CAM_SCENE_MODE_MAX,
9832 scene_mode_overrides,
9833 supported_indexes,
9834 cameraId);
9835
9836 if (supported_scene_modes_cnt == 0) {
9837 supported_scene_modes_cnt = 1;
9838 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9839 }
9840
9841 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9842 scene_mode_overrides, supported_scene_modes_cnt * 3);
9843
9844 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9845 ANDROID_CONTROL_MODE_AUTO,
9846 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9847 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9848 available_control_modes,
9849 3);
9850
9851 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9852 size = 0;
9853 count = CAM_ANTIBANDING_MODE_MAX;
9854 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9855 for (size_t i = 0; i < count; i++) {
9856 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9857 gCamCapability[cameraId]->supported_antibandings[i]);
9858 if (NAME_NOT_FOUND != val) {
9859 avail_antibanding_modes[size] = (uint8_t)val;
9860 size++;
9861 }
9862
9863 }
9864 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9865 avail_antibanding_modes,
9866 size);
9867
9868 uint8_t avail_abberation_modes[] = {
9869 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9870 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9871 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9872 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9873 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9874 if (0 == count) {
9875        // If no aberration correction modes are available for a device, advertise only the OFF mode
9876 size = 1;
9877 } else {
9878        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9879        // so advertise all 3 modes if at least one mode is supported, as per the
9880        // new M requirement
9881 size = 3;
9882 }
9883 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9884 avail_abberation_modes,
9885 size);
9886
9887 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9888 size = 0;
9889 count = CAM_FOCUS_MODE_MAX;
9890 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9891 for (size_t i = 0; i < count; i++) {
9892 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9893 gCamCapability[cameraId]->supported_focus_modes[i]);
9894 if (NAME_NOT_FOUND != val) {
9895 avail_af_modes[size] = (uint8_t)val;
9896 size++;
9897 }
9898 }
9899 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9900 avail_af_modes,
9901 size);
9902
9903 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9904 size = 0;
9905 count = CAM_WB_MODE_MAX;
9906 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9907 for (size_t i = 0; i < count; i++) {
9908 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9909 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9910 gCamCapability[cameraId]->supported_white_balances[i]);
9911 if (NAME_NOT_FOUND != val) {
9912 avail_awb_modes[size] = (uint8_t)val;
9913 size++;
9914 }
9915 }
9916 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9917 avail_awb_modes,
9918 size);
9919
9920 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9921 count = CAM_FLASH_FIRING_LEVEL_MAX;
9922 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9923 count);
9924 for (size_t i = 0; i < count; i++) {
9925 available_flash_levels[i] =
9926 gCamCapability[cameraId]->supported_firing_levels[i];
9927 }
9928 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9929 available_flash_levels, count);
9930
9931 uint8_t flashAvailable;
9932 if (gCamCapability[cameraId]->flash_available)
9933 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9934 else
9935 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9936 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9937 &flashAvailable, 1);
9938
9939 Vector<uint8_t> avail_ae_modes;
9940 count = CAM_AE_MODE_MAX;
9941 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9942 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009943 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9944 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9945 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9946 }
9947 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009948 }
9949 if (flashAvailable) {
9950 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9951 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9952 }
9953 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9954 avail_ae_modes.array(),
9955 avail_ae_modes.size());
9956
9957 int32_t sensitivity_range[2];
9958 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9959 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9960 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9961 sensitivity_range,
9962 sizeof(sensitivity_range) / sizeof(int32_t));
9963
9964 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9965 &gCamCapability[cameraId]->max_analog_sensitivity,
9966 1);
9967
9968 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9969 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9970 &sensor_orientation,
9971 1);
9972
9973 int32_t max_output_streams[] = {
9974 MAX_STALLING_STREAMS,
9975 MAX_PROCESSED_STREAMS,
9976 MAX_RAW_STREAMS};
9977 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9978 max_output_streams,
9979 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9980
9981 uint8_t avail_leds = 0;
9982 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9983 &avail_leds, 0);
9984
9985 uint8_t focus_dist_calibrated;
9986 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9987 gCamCapability[cameraId]->focus_dist_calibrated);
9988 if (NAME_NOT_FOUND != val) {
9989 focus_dist_calibrated = (uint8_t)val;
9990 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9991 &focus_dist_calibrated, 1);
9992 }
9993
9994 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9995 size = 0;
9996 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9997 MAX_TEST_PATTERN_CNT);
9998 for (size_t i = 0; i < count; i++) {
9999 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10000 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10001 if (NAME_NOT_FOUND != testpatternMode) {
10002 avail_testpattern_modes[size] = testpatternMode;
10003 size++;
10004 }
10005 }
10006 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10007 avail_testpattern_modes,
10008 size);
10009
10010 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10011 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10012 &max_pipeline_depth,
10013 1);
10014
10015 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10016 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10017 &partial_result_count,
10018 1);
10019
10020 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10021 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10022
10023 Vector<uint8_t> available_capabilities;
10024 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10025 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10026 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10027 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10028 if (supportBurst) {
10029 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10030 }
10031 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10032 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10033 if (hfrEnable && available_hfr_configs.array()) {
10034 available_capabilities.add(
10035 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10036 }
10037
10038 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10039 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10040 }
10041 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10042 available_capabilities.array(),
10043 available_capabilities.size());
10044
10045    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10046    //Assumption is that all Bayer cameras support MANUAL_SENSOR.
10047 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10048 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10049
10050 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10051 &aeLockAvailable, 1);
10052
10053    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10054    //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10055 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10056 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10057
10058 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10059 &awbLockAvailable, 1);
10060
10061 int32_t max_input_streams = 1;
10062 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10063 &max_input_streams,
10064 1);
10065
10066 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10067 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10068 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10069 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10070 HAL_PIXEL_FORMAT_YCbCr_420_888};
10071 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10072 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10073
10074 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10075 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10076 &max_latency,
10077 1);
10078
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010079#ifndef USE_HAL_3_3
10080 int32_t isp_sensitivity_range[2];
10081 isp_sensitivity_range[0] =
10082 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10083 isp_sensitivity_range[1] =
10084 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10085 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10086 isp_sensitivity_range,
10087 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10088#endif
10089
Thierry Strudel3d639192016-09-09 11:52:26 -070010090 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10091 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10092 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10093 available_hot_pixel_modes,
10094 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10095
10096 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10097 ANDROID_SHADING_MODE_FAST,
10098 ANDROID_SHADING_MODE_HIGH_QUALITY};
10099 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10100 available_shading_modes,
10101 3);
10102
10103 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10104 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10105 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10106 available_lens_shading_map_modes,
10107 2);
10108
10109 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10110 ANDROID_EDGE_MODE_FAST,
10111 ANDROID_EDGE_MODE_HIGH_QUALITY,
10112 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10113 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10114 available_edge_modes,
10115 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10116
10117 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10118 ANDROID_NOISE_REDUCTION_MODE_FAST,
10119 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10120 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10121 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10122 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10123 available_noise_red_modes,
10124 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10125
10126 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10127 ANDROID_TONEMAP_MODE_FAST,
10128 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10129 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10130 available_tonemap_modes,
10131 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10132
10133 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10134 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10135 available_hot_pixel_map_modes,
10136 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10137
10138 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10139 gCamCapability[cameraId]->reference_illuminant1);
10140 if (NAME_NOT_FOUND != val) {
10141 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10142 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10143 }
10144
10145 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10146 gCamCapability[cameraId]->reference_illuminant2);
10147 if (NAME_NOT_FOUND != val) {
10148 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10149 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10150 }
10151
10152 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10153 (void *)gCamCapability[cameraId]->forward_matrix1,
10154 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10155
10156 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10157 (void *)gCamCapability[cameraId]->forward_matrix2,
10158 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10159
10160 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10161 (void *)gCamCapability[cameraId]->color_transform1,
10162 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10163
10164 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10165 (void *)gCamCapability[cameraId]->color_transform2,
10166 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10167
10168 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10169 (void *)gCamCapability[cameraId]->calibration_transform1,
10170 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10171
10172 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10173 (void *)gCamCapability[cameraId]->calibration_transform2,
10174 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10175
10176 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10177 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10178 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10179 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10180 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10181 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10182 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10183 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10184 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10185 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10186 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10187 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10188 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10189 ANDROID_JPEG_GPS_COORDINATES,
10190 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10191 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10192 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10193 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10194 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10195 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10196 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10197 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10198 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10199 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010200#ifndef USE_HAL_3_3
10201 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10202#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010203 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010204 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010205 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10206 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010207 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010208 /* DevCamDebug metadata request_keys_basic */
10209 DEVCAMDEBUG_META_ENABLE,
10210 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010211 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010212 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010213 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010214 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010215 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010216 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010217
10218 size_t request_keys_cnt =
10219 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10220 Vector<int32_t> available_request_keys;
10221 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10222 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10223 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10224 }
10225
10226    if (gExposeEnableZslKey) {
10227        available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10228    }
10229
10230    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10231 available_request_keys.array(), available_request_keys.size());
10232
10233 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10234 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10235 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10236 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10237 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10238 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10239 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10240 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10241 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10242 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10243 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10244 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10245 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10246 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10247 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10248 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10249 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10250        ANDROID_STATISTICS_FACE_DETECT_MODE,
10251        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10252        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10253        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
10254        ANDROID_STATISTICS_FACE_SCORES,
10255#ifndef USE_HAL_3_3
10256        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10257#endif
10258        NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
10259        NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
10260        // DevCamDebug metadata result_keys_basic
10261 DEVCAMDEBUG_META_ENABLE,
10262 // DevCamDebug metadata result_keys AF
10263 DEVCAMDEBUG_AF_LENS_POSITION,
10264 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10265 DEVCAMDEBUG_AF_TOF_DISTANCE,
10266 DEVCAMDEBUG_AF_LUMA,
10267 DEVCAMDEBUG_AF_HAF_STATE,
10268 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10269 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10270 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10271 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10272 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10273 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10274 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10275 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10276 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10277 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10278 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10279 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10280 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10281 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10282 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10283 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10284 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10285 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10286 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10287 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10288 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10289 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10290 // DevCamDebug metadata result_keys AEC
10291 DEVCAMDEBUG_AEC_TARGET_LUMA,
10292 DEVCAMDEBUG_AEC_COMP_LUMA,
10293 DEVCAMDEBUG_AEC_AVG_LUMA,
10294 DEVCAMDEBUG_AEC_CUR_LUMA,
10295 DEVCAMDEBUG_AEC_LINECOUNT,
10296 DEVCAMDEBUG_AEC_REAL_GAIN,
10297 DEVCAMDEBUG_AEC_EXP_INDEX,
10298 DEVCAMDEBUG_AEC_LUX_IDX,
10299        // DevCamDebug metadata result_keys zzHDR
10300 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10301 DEVCAMDEBUG_AEC_L_LINECOUNT,
10302 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10303 DEVCAMDEBUG_AEC_S_LINECOUNT,
10304 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10305 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10306 // DevCamDebug metadata result_keys ADRC
10307 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10308 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10309 DEVCAMDEBUG_AEC_GTM_RATIO,
10310 DEVCAMDEBUG_AEC_LTM_RATIO,
10311 DEVCAMDEBUG_AEC_LA_RATIO,
10312 DEVCAMDEBUG_AEC_GAMMA_RATIO,
10313        // DevCamDebug metadata result_keys AEC MOTION
10314 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10315 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10316 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
10317        // DevCamDebug metadata result_keys AWB
10318 DEVCAMDEBUG_AWB_R_GAIN,
10319 DEVCAMDEBUG_AWB_G_GAIN,
10320 DEVCAMDEBUG_AWB_B_GAIN,
10321 DEVCAMDEBUG_AWB_CCT,
10322 DEVCAMDEBUG_AWB_DECISION,
10323 /* DevCamDebug metadata end */
10324        NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10325        NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10326        NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
10327        NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
10328        NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
10329    };
10330
10331    size_t result_keys_cnt =
10332 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10333
10334 Vector<int32_t> available_result_keys;
10335 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10336 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10337 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10338 }
10339 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10340 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10341 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10342 }
10343 if (supportedFaceDetectMode == 1) {
10344 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10345 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10346 } else if ((supportedFaceDetectMode == 2) ||
10347 (supportedFaceDetectMode == 3)) {
10348 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10349 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10350 }
10351#ifndef USE_HAL_3_3
10352 if (hasBlackRegions) {
10353 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10354 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10355 }
10356#endif
10357
10358    if (gExposeEnableZslKey) {
10359        available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10360        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
10361    }
10362
10363    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10364 available_result_keys.array(), available_result_keys.size());
10365
10366    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
10367        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10368 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10369 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10370 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10371 ANDROID_SCALER_CROPPING_TYPE,
10372 ANDROID_SYNC_MAX_LATENCY,
10373 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10374 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10375 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10376 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10377 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10378 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10379 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10380 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10381 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10382 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10383 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10384 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10385 ANDROID_LENS_FACING,
10386 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10387 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10388 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10389 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10390 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10391 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10392 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10393 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10394 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10395 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10396 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10397 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10398 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10399 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10400 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10401 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10402 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10403 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10404 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10405 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
10406        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
10407        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10408 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10409 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10410 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10411 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10412 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10413 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10414 ANDROID_CONTROL_AVAILABLE_MODES,
10415 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10416 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10417 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10418 ANDROID_SHADING_AVAILABLE_MODES,
10419        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10420#ifndef USE_HAL_3_3
10421 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10422 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10423#endif
10424 };
10425
10426 Vector<int32_t> available_characteristics_keys;
10427 available_characteristics_keys.appendArray(characteristics_keys_basic,
10428 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10429#ifndef USE_HAL_3_3
10430 if (hasBlackRegions) {
10431 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10432 }
10433#endif
10434
10435 if (0 <= indexPD) {
10436 int32_t depthKeys[] = {
10437 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10438 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10439 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10440 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10441 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10442 };
10443 available_characteristics_keys.appendArray(depthKeys,
10444 sizeof(depthKeys) / sizeof(depthKeys[0]));
10445 }
10446
10447    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
10448            available_characteristics_keys.array(),
10449            available_characteristics_keys.size());
10450
10451 /*available stall durations depend on the hw + sw and will be different for different devices */
10452 /*have to add for raw after implementation*/
10453 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10454 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10455
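    // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS is encoded as a flat list of
    // (format, width, height, stall duration in ns) entries, one per stalling
    // output size; the loop below builds that list for BLOB and RAW16.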
10456 Vector<int64_t> available_stall_durations;
10457 for (uint32_t j = 0; j < stall_formats_count; j++) {
10458 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10459 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10460 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10461 available_stall_durations.add(stall_formats[j]);
10462 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10463 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10464 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10465 }
10466 } else {
10467 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10468 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10469 available_stall_durations.add(stall_formats[j]);
10470 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10471 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10472 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10473 }
10474 }
10475 }
10476 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10477 available_stall_durations.array(),
10478 available_stall_durations.size());
10479
10480 //QCAMERA3_OPAQUE_RAW
10481 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10482 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10483 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10484 case LEGACY_RAW:
10485 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10486 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10487 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10488 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10489 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10490 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10491 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10492 break;
10493 case MIPI_RAW:
10494 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10495 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10496 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10497 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10498 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10499 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10500 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10501 break;
10502 default:
10503 LOGE("unknown opaque_raw_format %d",
10504 gCamCapability[cameraId]->opaque_raw_fmt);
10505 break;
10506 }
10507 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10508
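    // QCAMERA3_OPAQUE_RAW_STRIDES is packed as (width, height, stride) triplets,
    // with the stride taken from the first plane reported by mm_stream_calc_offset_raw()
    // for the opaque RAW format selected above.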
10509 Vector<int32_t> strides;
10510 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10511 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10512 cam_stream_buf_plane_info_t buf_planes;
10513 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10514 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10515 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10516 &gCamCapability[cameraId]->padding_info, &buf_planes);
10517 strides.add(buf_planes.plane_info.mp[0].stride);
10518 }
10519 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10520 strides.size());
10521
10522    //TBD: remove the following line once backend advertises zzHDR in feature mask
10523    gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
10524    //Video HDR default
10525    if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10526            (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
10527            CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
10528        int32_t vhdr_mode[] = {
10529 QCAMERA3_VIDEO_HDR_MODE_OFF,
10530 QCAMERA3_VIDEO_HDR_MODE_ON};
10531
10532 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10533 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10534 vhdr_mode, vhdr_mode_count);
10535 }
10536
10537    staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10538 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10539 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10540
10541 uint8_t isMonoOnly =
10542 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10543 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10544 &isMonoOnly, 1);
10545
10546#ifndef USE_HAL_3_3
10547 Vector<int32_t> opaque_size;
10548 for (size_t j = 0; j < scalar_formats_count; j++) {
10549 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10550 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10551 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10552 cam_stream_buf_plane_info_t buf_planes;
10553
10554 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10555 &gCamCapability[cameraId]->padding_info, &buf_planes);
10556
10557 if (rc == 0) {
10558 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10559 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10560 opaque_size.add(buf_planes.plane_info.frame_len);
10561                } else {
10562 LOGE("raw frame calculation failed!");
10563 }
10564 }
10565 }
10566 }
10567
10568 if ((opaque_size.size() > 0) &&
10569 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10570 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10571 else
10572 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10573#endif
10574
10575    if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10576 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10577 size = 0;
10578 count = CAM_IR_MODE_MAX;
10579 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10580 for (size_t i = 0; i < count; i++) {
10581 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10582 gCamCapability[cameraId]->supported_ir_modes[i]);
10583 if (NAME_NOT_FOUND != val) {
10584 avail_ir_modes[size] = (int32_t)val;
10585 size++;
10586 }
10587 }
10588 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10589 avail_ir_modes, size);
10590 }
10591
10592    if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10593 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10594 size = 0;
10595 count = CAM_AEC_CONVERGENCE_MAX;
10596 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10597 for (size_t i = 0; i < count; i++) {
10598 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10599 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10600 if (NAME_NOT_FOUND != val) {
10601 available_instant_aec_modes[size] = (int32_t)val;
10602 size++;
10603 }
10604 }
10605 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10606 available_instant_aec_modes, size);
10607 }
10608
10609    int32_t sharpness_range[] = {
10610 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10611 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10612 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10613
10614 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10615 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10616 size = 0;
10617 count = CAM_BINNING_CORRECTION_MODE_MAX;
10618 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10619 for (size_t i = 0; i < count; i++) {
10620 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10621 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10622 gCamCapability[cameraId]->supported_binning_modes[i]);
10623 if (NAME_NOT_FOUND != val) {
10624 avail_binning_modes[size] = (int32_t)val;
10625 size++;
10626 }
10627 }
10628 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10629 avail_binning_modes, size);
10630 }
10631
10632 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10633 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10634 size = 0;
10635 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10636 for (size_t i = 0; i < count; i++) {
10637 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10638 gCamCapability[cameraId]->supported_aec_modes[i]);
10639 if (NAME_NOT_FOUND != val)
10640 available_aec_modes[size++] = val;
10641 }
10642 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10643 available_aec_modes, size);
10644 }
10645
10646 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10647 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10648 size = 0;
10649 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10650 for (size_t i = 0; i < count; i++) {
10651 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10652 gCamCapability[cameraId]->supported_iso_modes[i]);
10653 if (NAME_NOT_FOUND != val)
10654 available_iso_modes[size++] = val;
10655 }
10656 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10657 available_iso_modes, size);
10658 }
10659
10660 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10661    for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10662        available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10663 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10664 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10665
10666 int32_t available_saturation_range[4];
10667 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10668 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10669 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10670 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10671 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10672 available_saturation_range, 4);
10673
10674 uint8_t is_hdr_values[2];
10675 is_hdr_values[0] = 0;
10676 is_hdr_values[1] = 1;
10677 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10678 is_hdr_values, 2);
10679
10680 float is_hdr_confidence_range[2];
10681 is_hdr_confidence_range[0] = 0.0;
10682 is_hdr_confidence_range[1] = 1.0;
10683 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10684 is_hdr_confidence_range, 2);
10685
10686    size_t eepromLength = strnlen(
10687 reinterpret_cast<const char *>(
10688 gCamCapability[cameraId]->eeprom_version_info),
10689 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10690 if (0 < eepromLength) {
10691        char easelInfo[] = ",E:N";
10692        char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10693        if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10694            eepromLength += sizeof(easelInfo);
10695            strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10696                    gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
10697                    MAX_EEPROM_VERSION_INFO_LEN);
10698        }
10699        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10700 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10701 }
10702
10703    gStaticMetadata[cameraId] = staticInfo.release();
10704 return rc;
10705}
10706
10707/*===========================================================================
10708 * FUNCTION : makeTable
10709 *
10710 * DESCRIPTION: make a table of sizes
10711 *
10712 * PARAMETERS :
10713 *
10714 *
10715 *==========================================================================*/
10716void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10717 size_t max_size, int32_t *sizeTable)
10718{
10719 size_t j = 0;
10720 if (size > max_size) {
10721 size = max_size;
10722 }
10723 for (size_t i = 0; i < size; i++) {
10724 sizeTable[j] = dimTable[i].width;
10725 sizeTable[j+1] = dimTable[i].height;
10726 j+=2;
10727 }
10728}
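/* makeTable() flattens a cam_dimension_t list into the (width, height) pair layout
 * that the framework size tags expect. A minimal usage sketch (the arguments and the
 * destination array are illustrative only):
 *
 *   int32_t sizes[MAX_SIZES_CNT * 2];
 *   makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
 *           gCamCapability[cameraId]->picture_sizes_tbl_cnt,
 *           MAX_SIZES_CNT, sizes);   // sizes = {w0, h0, w1, h1, ...}
 */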
10729
10730/*===========================================================================
10731 * FUNCTION : makeFPSTable
10732 *
10733 * DESCRIPTION: make a table of fps ranges
10734 *
10735 * PARAMETERS :
10736 *
10737 *==========================================================================*/
10738void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10739 size_t max_size, int32_t *fpsRangesTable)
10740{
10741 size_t j = 0;
10742 if (size > max_size) {
10743 size = max_size;
10744 }
10745 for (size_t i = 0; i < size; i++) {
10746 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10747 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10748 j+=2;
10749 }
10750}
10751
10752/*===========================================================================
10753 * FUNCTION : makeOverridesList
10754 *
10755 * DESCRIPTION: make a list of scene mode overrides
10756 *
10757 * PARAMETERS :
10758 *
10759 *
10760 *==========================================================================*/
10761void QCamera3HardwareInterface::makeOverridesList(
10762 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10763 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10764{
10765 /*daemon will give a list of overrides for all scene modes.
10766 However we should send the fwk only the overrides for the scene modes
10767 supported by the framework*/
10768 size_t j = 0;
10769 if (size > max_size) {
10770 size = max_size;
10771 }
10772 size_t focus_count = CAM_FOCUS_MODE_MAX;
10773 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10774 focus_count);
10775 for (size_t i = 0; i < size; i++) {
10776 bool supt = false;
10777 size_t index = supported_indexes[i];
10778 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10779 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10780 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10781 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10782 overridesTable[index].awb_mode);
10783 if (NAME_NOT_FOUND != val) {
10784 overridesList[j+1] = (uint8_t)val;
10785 }
10786 uint8_t focus_override = overridesTable[index].af_mode;
10787 for (size_t k = 0; k < focus_count; k++) {
10788 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10789 supt = true;
10790 break;
10791 }
10792 }
10793 if (supt) {
10794 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10795 focus_override);
10796 if (NAME_NOT_FOUND != val) {
10797 overridesList[j+2] = (uint8_t)val;
10798 }
10799 } else {
10800 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10801 }
10802 j+=3;
10803 }
10804}
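/* makeOverridesList() packs one (aeMode, awbMode, afMode) triplet per supported scene
 * mode, matching the layout expected by ANDROID_CONTROL_SCENE_MODE_OVERRIDES; the AF
 * entry falls back to ANDROID_CONTROL_AF_MODE_OFF when the overridden focus mode is
 * not supported by the sensor. */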
10805
10806/*===========================================================================
10807 * FUNCTION : filterJpegSizes
10808 *
10809 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
10810 * DESCRIPTION: Returns the supported JPEG sizes, based on the maximum dimension that
10811 *              the active array size can be downscaled to
10812 * PARAMETERS :
10813 *
10814 * RETURN : length of jpegSizes array
10815 *==========================================================================*/
10816
10817size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10818 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10819 uint8_t downscale_factor)
10820{
10821 if (0 == downscale_factor) {
10822 downscale_factor = 1;
10823 }
10824
10825 int32_t min_width = active_array_size.width / downscale_factor;
10826 int32_t min_height = active_array_size.height / downscale_factor;
10827 size_t jpegSizesCnt = 0;
10828 if (processedSizesCnt > maxCount) {
10829 processedSizesCnt = maxCount;
10830 }
10831 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10832 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10833 jpegSizes[jpegSizesCnt] = processedSizes[i];
10834 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10835 jpegSizesCnt += 2;
10836 }
10837 }
10838 return jpegSizesCnt;
10839}
10840
10841/*===========================================================================
10842 * FUNCTION : computeNoiseModelEntryS
10843 *
10844 * DESCRIPTION: function to map a given sensitivity to the S noise
10845 * model parameters in the DNG noise model.
10846 *
10847 * PARAMETERS : sens : the sensor sensitivity
10848 *
10849 * RETURN     : S (sensor amplification) noise
10850 *
10851 *==========================================================================*/
10852double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10853 double s = gCamCapability[mCameraId]->gradient_S * sens +
10854 gCamCapability[mCameraId]->offset_S;
10855 return ((s < 0.0) ? 0.0 : s);
10856}
10857
10858/*===========================================================================
10859 * FUNCTION : computeNoiseModelEntryO
10860 *
10861 * DESCRIPTION: function to map a given sensitivity to the O noise
10862 * model parameters in the DNG noise model.
10863 *
10864 * PARAMETERS : sens : the sensor sensitivity
10865 *
10866 * RETURN     : O (sensor readout) noise
10867 *
10868 *==========================================================================*/
10869double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10870 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10871 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10872 1.0 : (1.0 * sens / max_analog_sens);
10873 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10874 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10875 return ((o < 0.0) ? 0.0 : o);
10876}
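/* computeNoiseModelEntryS/O provide the per-sensitivity (S, O) coefficient pairs used
 * for ANDROID_SENSOR_NOISE_PROFILE, where the DNG noise model approximates the noise
 * variance of a normalized pixel value x as S * x + O; S scales with sensitivity while
 * O additionally grows with the square of any digital gain, as computed above. */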
10877
10878/*===========================================================================
10879 * FUNCTION : getSensorSensitivity
10880 *
10881 * DESCRIPTION: convert iso_mode to an integer value
10882 *
10883 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10884 *
10885 * RETURN     : sensitivity supported by sensor
10886 *
10887 *==========================================================================*/
10888int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10889{
10890 int32_t sensitivity;
10891
10892 switch (iso_mode) {
10893 case CAM_ISO_MODE_100:
10894 sensitivity = 100;
10895 break;
10896 case CAM_ISO_MODE_200:
10897 sensitivity = 200;
10898 break;
10899 case CAM_ISO_MODE_400:
10900 sensitivity = 400;
10901 break;
10902 case CAM_ISO_MODE_800:
10903 sensitivity = 800;
10904 break;
10905 case CAM_ISO_MODE_1600:
10906 sensitivity = 1600;
10907 break;
10908 default:
10909 sensitivity = -1;
10910 break;
10911 }
10912 return sensitivity;
10913}
10914
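/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: Lazily create the Easel manager client and, if Easel is present
 *              (and not explicitly kept powered off for HDR+ testing), open and
 *              immediately suspend it, then derive the HDR+ related globals
 *              (gEaselBypassOnly, gEaselProfilingEnabled, gExposeEnableZslKey)
 *              from system properties. Expected to be called with
 *              gHdrPlusClientLock held, as in getCamInfo() below.
 *
 * RETURN     : OK on success, negative error code on failure
 *==========================================================================*/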
10915int QCamera3HardwareInterface::initHdrPlusClientLocked() {
10916    if (gEaselManagerClient == nullptr) {
10917 gEaselManagerClient = EaselManagerClient::create();
10918 if (gEaselManagerClient == nullptr) {
10919 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10920 return -ENODEV;
10921 }
10922 }
10923
10924 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
10925        // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10926 // to connect to Easel.
10927 bool doNotpowerOnEasel =
10928 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10929
10930 if (doNotpowerOnEasel) {
10931            ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10932 return OK;
10933 }
10934
10935        // If Easel is present, power on Easel and suspend it immediately.
10936        status_t res = gEaselManagerClient->open();
10937        if (res != OK) {
10938            ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10939            return res;
10940 }
10941
10942        EaselManagerClientOpened = true;
10943
10944        res = gEaselManagerClient->suspend();
10945        if (res != OK) {
10946 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10947 }
10948
10949        gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
10950        gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
10951
10952 // Expose enableZsl key only when HDR+ mode is enabled.
10953 gExposeEnableZslKey = !gEaselBypassOnly;
10954    }
10955
10956 return OK;
10957}
10958
10959/*===========================================================================
10960 * FUNCTION : getCamInfo
10961 *
10962 * DESCRIPTION: query camera capabilities
10963 *
10964 * PARAMETERS :
10965 * @cameraId : camera Id
10966 * @info : camera info struct to be filled in with camera capabilities
10967 *
10968 * RETURN : int type of status
10969 * NO_ERROR -- success
10970 *              non-zero failure code
10971 *==========================================================================*/
10972int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10973 struct camera_info *info)
10974{
10975    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
10976    int rc = 0;
10977
10978 pthread_mutex_lock(&gCamLock);
10979
10980    {
10981        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
10982        rc = initHdrPlusClientLocked();
10983 if (rc != OK) {
10984 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10985 pthread_mutex_unlock(&gCamLock);
10986 return rc;
10987 }
10988    }
10989
10990    if (NULL == gCamCapability[cameraId]) {
10991 rc = initCapabilities(cameraId);
10992 if (rc < 0) {
10993 pthread_mutex_unlock(&gCamLock);
10994 return rc;
10995 }
10996 }
10997
10998 if (NULL == gStaticMetadata[cameraId]) {
10999 rc = initStaticMetadata(cameraId);
11000 if (rc < 0) {
11001 pthread_mutex_unlock(&gCamLock);
11002 return rc;
11003 }
11004 }
11005
11006 switch(gCamCapability[cameraId]->position) {
11007 case CAM_POSITION_BACK:
11008 case CAM_POSITION_BACK_AUX:
11009 info->facing = CAMERA_FACING_BACK;
11010 break;
11011
11012 case CAM_POSITION_FRONT:
11013 case CAM_POSITION_FRONT_AUX:
11014 info->facing = CAMERA_FACING_FRONT;
11015 break;
11016
11017 default:
11018 LOGE("Unknown position type %d for camera id:%d",
11019 gCamCapability[cameraId]->position, cameraId);
11020 rc = -1;
11021 break;
11022 }
11023
11024
11025 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
11026#ifndef USE_HAL_3_3
11027    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11028#else
11029    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
11030#endif
11031    info->static_camera_characteristics = gStaticMetadata[cameraId];
11032
11033 //For now assume both cameras can operate independently.
11034 info->conflicting_devices = NULL;
11035 info->conflicting_devices_length = 0;
11036
11037 //resource cost is 100 * MIN(1.0, m/M),
11038 //where m is throughput requirement with maximum stream configuration
11039 //and M is CPP maximum throughput.
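    //For illustration (numbers hypothetical): with MAX_PROCESSED_STREAMS = 2, a
    //4000x3000 active array, max_fps = 30 and max_pixel_bandwidth = 1.2e9 pixels/s,
    //ratio = 2 * 4000 * 3000 * 30 / 1.2e9 = 0.6, so resource_cost = 60.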
11040 float max_fps = 0.0;
11041 for (uint32_t i = 0;
11042 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11043 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11044 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11045 }
11046 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11047 gCamCapability[cameraId]->active_array_size.width *
11048 gCamCapability[cameraId]->active_array_size.height * max_fps /
11049 gCamCapability[cameraId]->max_pixel_bandwidth;
11050 info->resource_cost = 100 * MIN(1.0, ratio);
11051 LOGI("camera %d resource cost is %d", cameraId,
11052 info->resource_cost);
11053
11054 pthread_mutex_unlock(&gCamLock);
11055 return rc;
11056}
11057
11058/*===========================================================================
11059 * FUNCTION : translateCapabilityToMetadata
11060 *
11061 * DESCRIPTION: translate the capability into camera_metadata_t
11062 *
11063 * PARAMETERS : type of the request
11064 *
11065 *
11066 * RETURN : success: camera_metadata_t*
11067 * failure: NULL
11068 *
11069 *==========================================================================*/
11070camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11071{
11072 if (mDefaultMetadata[type] != NULL) {
11073 return mDefaultMetadata[type];
11074 }
11075 //first time we are handling this request
11076 //fill up the metadata structure using the wrapper class
11077 CameraMetadata settings;
11078 //translate from cam_capability_t to camera_metadata_tag_t
11079 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11080 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11081 int32_t defaultRequestID = 0;
11082 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11083
11084 /* OIS disable */
11085 char ois_prop[PROPERTY_VALUE_MAX];
11086 memset(ois_prop, 0, sizeof(ois_prop));
11087 property_get("persist.camera.ois.disable", ois_prop, "0");
11088 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11089
11090 /* Force video to use OIS */
11091 char videoOisProp[PROPERTY_VALUE_MAX];
11092 memset(videoOisProp, 0, sizeof(videoOisProp));
11093 property_get("persist.camera.ois.video", videoOisProp, "1");
11094 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
11095
11096 // Hybrid AE enable/disable
11097 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11098 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11099 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11100 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11101
11102    uint8_t controlIntent = 0;
11103 uint8_t focusMode;
11104 uint8_t vsMode;
11105 uint8_t optStabMode;
11106 uint8_t cacMode;
11107 uint8_t edge_mode;
11108 uint8_t noise_red_mode;
11109 uint8_t tonemap_mode;
11110 bool highQualityModeEntryAvailable = FALSE;
11111 bool fastModeEntryAvailable = FALSE;
11112    uint8_t histogramEnable = false;
11113    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11114    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11115    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
11116    uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
11117    uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
11118
11119    switch (type) {
11120 case CAMERA3_TEMPLATE_PREVIEW:
11121 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11122 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11123 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11124 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11125 edge_mode = ANDROID_EDGE_MODE_FAST;
11126 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11127 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11128 break;
11129 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11130 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11131 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11132 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11133 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11134 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11135 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11136 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11137 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11138 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11139 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11140 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11141 highQualityModeEntryAvailable = TRUE;
11142 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11143 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11144 fastModeEntryAvailable = TRUE;
11145 }
11146 }
11147 if (highQualityModeEntryAvailable) {
11148 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11149 } else if (fastModeEntryAvailable) {
11150 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11151 }
11152        if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11153            shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11154        }
11155        enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
11156        break;
11157 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11158 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11159 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11160 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11161        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11162 edge_mode = ANDROID_EDGE_MODE_FAST;
11163 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11164 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11165 if (forceVideoOis)
11166 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11167 break;
11168 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11169 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11170 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11171 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11172        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11173 edge_mode = ANDROID_EDGE_MODE_FAST;
11174 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11175 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11176 if (forceVideoOis)
11177 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11178 break;
11179 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11180 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11181 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11182 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11183 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11184 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11185 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11186 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11187 break;
11188 case CAMERA3_TEMPLATE_MANUAL:
11189 edge_mode = ANDROID_EDGE_MODE_FAST;
11190 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11191 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11192 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11193 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11194 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11195 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11196 break;
11197 default:
11198 edge_mode = ANDROID_EDGE_MODE_FAST;
11199 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11200 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11201 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11202 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11203 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11204 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11205 break;
11206 }
11207    // Set CAC to OFF if the underlying device doesn't support it
11208    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11209        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11210    }
11211    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11212 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11213 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11214 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11215 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11216 }
11217 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
11218    settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
11219    settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
11220
11221 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11222 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11223 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11224 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11225 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11226 || ois_disable)
11227 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11228 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
11229    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
11230
11231 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11232 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11233
11234 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11235 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11236
11237 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11238 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11239
11240 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11241 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11242
11243 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11244 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11245
11246 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11247 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11248
11249 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11250 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11251
11252 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11253 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11254
11255 /*flash*/
11256 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11257 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11258
11259 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11260 settings.update(ANDROID_FLASH_FIRING_POWER,
11261 &flashFiringLevel, 1);
11262
11263 /* lens */
11264 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11265 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11266
11267 if (gCamCapability[mCameraId]->filter_densities_count) {
11268 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11269 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11270 gCamCapability[mCameraId]->filter_densities_count);
11271 }
11272
11273 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11274 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11275
11276    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11277 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11278
11279 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11280 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11281
11282 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11283 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11284
11285 /* face detection (default to OFF) */
11286 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11287 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11288
11289    static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11290    settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
11291
11292 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11293 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11294
11295 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11296 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11297
11298
11299 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11300 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11301
11302 /* Exposure time(Update the Min Exposure Time)*/
11303 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11304 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11305
11306 /* frame duration */
11307 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11308 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11309
11310 /* sensitivity */
11311 static const int32_t default_sensitivity = 100;
11312 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
11313#ifndef USE_HAL_3_3
11314 static const int32_t default_isp_sensitivity =
11315 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11316 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11317#endif
11318
11319 /*edge mode*/
11320 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11321
11322 /*noise reduction mode*/
11323 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11324
11325 /*color correction mode*/
11326 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11327 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11328
11329 /*transform matrix mode*/
11330 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11331
11332 int32_t scaler_crop_region[4];
11333 scaler_crop_region[0] = 0;
11334 scaler_crop_region[1] = 0;
11335 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11336 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11337 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11338
11339 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11340 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11341
11342 /*focus distance*/
11343 float focus_distance = 0.0;
11344 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11345
11346 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
11347    /* Restrict template max_fps to 30 */
11348    float max_range = 0.0;
11349 float max_fixed_fps = 0.0;
11350 int32_t fps_range[2] = {0, 0};
11351 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11352 i++) {
11353        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11354 TEMPLATE_MAX_PREVIEW_FPS) {
11355 continue;
11356 }
11357        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11358 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11359 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11360 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11361 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11362 if (range > max_range) {
11363 fps_range[0] =
11364 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11365 fps_range[1] =
11366 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11367 max_range = range;
11368 }
11369 } else {
11370 if (range < 0.01 && max_fixed_fps <
11371 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11372 fps_range[0] =
11373 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11374 fps_range[1] =
11375 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11376 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11377 }
11378 }
11379 }
11380 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11381
11382 /*precapture trigger*/
11383 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11384 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11385
11386 /*af trigger*/
11387 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11388 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11389
11390 /* ae & af regions */
11391 int32_t active_region[] = {
11392 gCamCapability[mCameraId]->active_array_size.left,
11393 gCamCapability[mCameraId]->active_array_size.top,
11394 gCamCapability[mCameraId]->active_array_size.left +
11395 gCamCapability[mCameraId]->active_array_size.width,
11396 gCamCapability[mCameraId]->active_array_size.top +
11397 gCamCapability[mCameraId]->active_array_size.height,
11398 0};
11399 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11400 sizeof(active_region) / sizeof(active_region[0]));
11401 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11402 sizeof(active_region) / sizeof(active_region[0]));
11403
11404 /* black level lock */
11405 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11406 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11407
11408    //special defaults for manual template
11409 if (type == CAMERA3_TEMPLATE_MANUAL) {
11410 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11411 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11412
11413 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11414 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11415
11416 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11417 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11418
11419 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11420 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11421
11422 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11423 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11424
11425 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11426 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11427 }
11428
11429
11430 /* TNR
11431 * This is where we decide for which templates TNR will be enabled.
11432 * TNR is turned on if either the preview or the video stream requires it.
11433 * This is not to be confused with per-stream linking; that decision
11434 * is still made per session and is handled as part of stream configuration.
11435 */
11436 uint8_t tnr_enable = 0;
11437
11438 if (m_bTnrPreview || m_bTnrVideo) {
11439
11440 switch (type) {
11441 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11442 tnr_enable = 1;
11443 break;
11444
11445 default:
11446 tnr_enable = 0;
11447 break;
11448 }
11449
11450 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11451 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11452 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11453
11454 LOGD("TNR:%d with process plate %d for template:%d",
11455 tnr_enable, tnr_process_type, type);
11456 }
11457
11458 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011459 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011460 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11461
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011462 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011463 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11464
Shuzhen Wang920ea402017-05-03 08:49:39 -070011465 uint8_t related_camera_id = mCameraId;
11466 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011467
11468 /* CDS default */
11469 char prop[PROPERTY_VALUE_MAX];
11470 memset(prop, 0, sizeof(prop));
11471 property_get("persist.camera.CDS", prop, "Auto");
11472 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11473 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11474 if (CAM_CDS_MODE_MAX == cds_mode) {
11475 cds_mode = CAM_CDS_MODE_AUTO;
11476 }
11477
11478 /* Disabling CDS in templates which have TNR enabled*/
11479 if (tnr_enable)
11480 cds_mode = CAM_CDS_MODE_OFF;
11481
11482 int32_t mode = cds_mode;
11483 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011484
Thierry Strudel269c81a2016-10-12 12:13:59 -070011485 /* Manual Convergence AEC Speed is disabled by default*/
11486 float default_aec_speed = 0;
11487 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11488
11489 /* Manual Convergence AWB Speed is disabled by default*/
11490 float default_awb_speed = 0;
11491 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11492
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011493 // Set instant AEC to normal convergence by default
11494 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11495 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11496
Shuzhen Wang19463d72016-03-08 11:09:52 -080011497 /* hybrid ae */
11498 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11499
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011500 if (gExposeEnableZslKey) {
11501 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11502 }
11503
Thierry Strudel3d639192016-09-09 11:52:26 -070011504 mDefaultMetadata[type] = settings.release();
11505
11506 return mDefaultMetadata[type];
11507}
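// Illustrative sketch (not part of the original HAL): the defaults cached above in
// mDefaultMetadata[type] form an immutable camera_metadata_t, so a single entry can be
// inspected with the metadata helpers already used in this file. The snippet below is a
// hedged usage example only, not code from this HAL.
//
//   const camera_metadata_t *defaults = mDefaultMetadata[CAMERA3_TEMPLATE_PREVIEW];
//   camera_metadata_ro_entry_t entry = camera_metadata_ro_entry_t();
//   if ((defaults != NULL) &&
//           (0 == find_camera_metadata_ro_entry(defaults,
//                   ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry)) &&
//           (entry.count > 1)) {
//       LOGD("default fps range [%d, %d]", entry.data.i32[0], entry.data.i32[1]);
//   }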
11508
11509/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011510 * FUNCTION : getExpectedFrameDuration
11511 *
11512 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11513 * duration
11514 *
11515 * PARAMETERS :
11516 * @request : request settings
11517 * @frameDuration : The maximum frame duration in nanoseconds
11518 *
11519 * RETURN : None
11520 *==========================================================================*/
11521void QCamera3HardwareInterface::getExpectedFrameDuration(
11522 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11523 if (nullptr == frameDuration) {
11524 return;
11525 }
11526
11527 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11528 find_camera_metadata_ro_entry(request,
11529 ANDROID_SENSOR_EXPOSURE_TIME,
11530 &e);
11531 if (e.count > 0) {
11532 *frameDuration = e.data.i64[0];
11533 }
11534 find_camera_metadata_ro_entry(request,
11535 ANDROID_SENSOR_FRAME_DURATION,
11536 &e);
11537 if (e.count > 0) {
11538 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11539 }
11540}
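// Worked example (illustrative, not part of the original HAL): for a manual request
// carrying ANDROID_SENSOR_EXPOSURE_TIME = 40000000 ns (40 ms) and
// ANDROID_SENSOR_FRAME_DURATION = 33333333 ns (~33 ms), the logic above keeps the
// larger of the two values, so *frameDuration ends up as 40000000 ns.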
11541
11542/*===========================================================================
11543 * FUNCTION : calculateMaxExpectedDuration
11544 *
11545 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11546 * current camera settings.
11547 *
11548 * PARAMETERS :
11549 * @request : request settings
11550 *
11551 * RETURN : Expected frame duration in nanoseconds.
11552 *==========================================================================*/
11553nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11554 const camera_metadata_t *request) {
11555 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11556 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11557 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11558 if (e.count == 0) {
11559 return maxExpectedDuration;
11560 }
11561
11562 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11563 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11564 }
11565
11566 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11567 return maxExpectedDuration;
11568 }
11569
11570 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11571 if (e.count == 0) {
11572 return maxExpectedDuration;
11573 }
11574
11575 switch (e.data.u8[0]) {
11576 case ANDROID_CONTROL_AE_MODE_OFF:
11577 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11578 break;
11579 default:
11580 find_camera_metadata_ro_entry(request,
11581 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11582 &e);
11583 if (e.count > 1) {
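                // ANDROID_CONTROL_AE_TARGET_FPS_RANGE is an int32[2] entry of
                // {min_fps, max_fps}; the minimum fps bounds the longest frame duration.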
11584                maxExpectedDuration = 1e9 / e.data.i32[0];
11585 }
11586 break;
11587 }
11588
11589 return maxExpectedDuration;
11590}
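// Worked example (illustrative, not part of the original HAL): with ANDROID_CONTROL_MODE
// set to AUTO, AE mode ON and an AE target fps range of [15, 30], the expected duration
// resolves to 1e9 / 15 ns, i.e. roughly 66.7 ms per frame; with control or AE mode OFF it
// falls back to the explicit exposure time / frame duration via getExpectedFrameDuration().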
11591
11592/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011593 * FUNCTION : setFrameParameters
11594 *
11595 * DESCRIPTION: set parameters per frame as requested in the metadata from
11596 * framework
11597 *
11598 * PARAMETERS :
11599 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011600 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011601 * @blob_request: Whether this request is a blob request or not
11602 *
11603 * RETURN : success: NO_ERROR
11604 * failure:
11605 *==========================================================================*/
11606int QCamera3HardwareInterface::setFrameParameters(
11607 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011608 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011609 int blob_request,
11610 uint32_t snapshotStreamId)
11611{
11612 /*translate from camera_metadata_t type to parm_type_t*/
11613 int rc = 0;
11614 int32_t hal_version = CAM_HAL_V3;
11615
11616 clear_metadata_buffer(mParameters);
11617 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11618 LOGE("Failed to set hal version in the parameters");
11619 return BAD_VALUE;
11620 }
11621
11622 /*we need to update the frame number in the parameters*/
11623 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11624 request->frame_number)) {
11625 LOGE("Failed to set the frame number in the parameters");
11626 return BAD_VALUE;
11627 }
11628
11629 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011631 LOGE("Failed to set stream type mask in the parameters");
11632 return BAD_VALUE;
11633 }
11634
11635 if (mUpdateDebugLevel) {
11636 uint32_t dummyDebugLevel = 0;
11637 /* The value of dummyDebugLevel is irrelevant. On
11638 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11639 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11640 dummyDebugLevel)) {
11641 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11642 return BAD_VALUE;
11643 }
11644 mUpdateDebugLevel = false;
11645 }
11646
11647 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011648 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011649 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11650 if (blob_request)
11651 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11652 }
11653
11654 return rc;
11655}
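// Illustrative sketch (not part of the original HAL): the per-frame flow above amounts to
// "reset the parameter batch, stamp the bookkeeping entries, then translate the framework
// settings". A caller is assumed to invoke it roughly as follows; the streamsArray
// construction is simplified and hypothetical.
//
//   cam_stream_ID_t streamsArray;
//   memset(&streamsArray, 0, sizeof(cam_stream_ID_t));
//   // ... fill streamsArray from the streams referenced by request->output_buffers ...
//   rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
//   if (rc == NO_ERROR) {
//       // mParameters now holds the translated per-frame settings.
//   }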
11656
11657/*===========================================================================
11658 * FUNCTION : setReprocParameters
11659 *
11660 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11661 * return it.
11662 *
11663 * PARAMETERS :
11664 * @request : request that needs to be serviced
11665 *
11666 * RETURN : success: NO_ERROR
11667 * failure:
11668 *==========================================================================*/
11669int32_t QCamera3HardwareInterface::setReprocParameters(
11670 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11671 uint32_t snapshotStreamId)
11672{
11673 /*translate from camera_metadata_t type to parm_type_t*/
11674 int rc = 0;
11675
11676 if (NULL == request->settings){
11677 LOGE("Reprocess settings cannot be NULL");
11678 return BAD_VALUE;
11679 }
11680
11681 if (NULL == reprocParam) {
11682 LOGE("Invalid reprocessing metadata buffer");
11683 return BAD_VALUE;
11684 }
11685 clear_metadata_buffer(reprocParam);
11686
11687 /*we need to update the frame number in the parameters*/
11688 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11689 request->frame_number)) {
11690 LOGE("Failed to set the frame number in the parameters");
11691 return BAD_VALUE;
11692 }
11693
11694 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11695 if (rc < 0) {
11696 LOGE("Failed to translate reproc request");
11697 return rc;
11698 }
11699
11700 CameraMetadata frame_settings;
11701 frame_settings = request->settings;
11702 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11703 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11704 int32_t *crop_count =
11705 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11706 int32_t *crop_data =
11707 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11708 int32_t *roi_map =
11709 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11710 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11711 cam_crop_data_t crop_meta;
11712 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11713 crop_meta.num_of_streams = 1;
11714 crop_meta.crop_info[0].crop.left = crop_data[0];
11715 crop_meta.crop_info[0].crop.top = crop_data[1];
11716 crop_meta.crop_info[0].crop.width = crop_data[2];
11717 crop_meta.crop_info[0].crop.height = crop_data[3];
11718
11719 crop_meta.crop_info[0].roi_map.left =
11720 roi_map[0];
11721 crop_meta.crop_info[0].roi_map.top =
11722 roi_map[1];
11723 crop_meta.crop_info[0].roi_map.width =
11724 roi_map[2];
11725 crop_meta.crop_info[0].roi_map.height =
11726 roi_map[3];
11727
11728 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11729 rc = BAD_VALUE;
11730 }
11731 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11732 request->input_buffer->stream,
11733 crop_meta.crop_info[0].crop.left,
11734 crop_meta.crop_info[0].crop.top,
11735 crop_meta.crop_info[0].crop.width,
11736 crop_meta.crop_info[0].crop.height);
11737 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11738 request->input_buffer->stream,
11739 crop_meta.crop_info[0].roi_map.left,
11740 crop_meta.crop_info[0].roi_map.top,
11741 crop_meta.crop_info[0].roi_map.width,
11742 crop_meta.crop_info[0].roi_map.height);
11743 } else {
11744 LOGE("Invalid reprocess crop count %d!", *crop_count);
11745 }
11746 } else {
11747 LOGE("No crop data from matching output stream");
11748 }
11749
11750 /* These settings are not needed for regular requests, so handle them specially for
11751 reprocess requests; this information is needed for the EXIF tags */
11752 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11753 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11754 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11755 if (NAME_NOT_FOUND != val) {
11756 uint32_t flashMode = (uint32_t)val;
11757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11758 rc = BAD_VALUE;
11759 }
11760 } else {
11761 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11762 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11763 }
11764 } else {
11765 LOGH("No flash mode in reprocess settings");
11766 }
11767
11768 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11769 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11771 rc = BAD_VALUE;
11772 }
11773 } else {
11774 LOGH("No flash state in reprocess settings");
11775 }
11776
11777 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11778 uint8_t *reprocessFlags =
11779 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11781 *reprocessFlags)) {
11782 rc = BAD_VALUE;
11783 }
11784 }
11785
Thierry Strudel54dc9782017-02-15 12:12:10 -080011786 // Add exif debug data to internal metadata
11787 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11788 mm_jpeg_debug_exif_params_t *debug_params =
11789 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11790 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11791 // AE
11792 if (debug_params->ae_debug_params_valid == TRUE) {
11793 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11794 debug_params->ae_debug_params);
11795 }
11796 // AWB
11797 if (debug_params->awb_debug_params_valid == TRUE) {
11798 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11799 debug_params->awb_debug_params);
11800 }
11801 // AF
11802 if (debug_params->af_debug_params_valid == TRUE) {
11803 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11804 debug_params->af_debug_params);
11805 }
11806 // ASD
11807 if (debug_params->asd_debug_params_valid == TRUE) {
11808 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11809 debug_params->asd_debug_params);
11810 }
11811 // Stats
11812 if (debug_params->stats_debug_params_valid == TRUE) {
11813 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11814 debug_params->stats_debug_params);
11815 }
11816 // BE Stats
11817 if (debug_params->bestats_debug_params_valid == TRUE) {
11818 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11819 debug_params->bestats_debug_params);
11820 }
11821 // BHIST
11822 if (debug_params->bhist_debug_params_valid == TRUE) {
11823 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11824 debug_params->bhist_debug_params);
11825 }
11826 // 3A Tuning
11827 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11828 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11829 debug_params->q3a_tuning_debug_params);
11830 }
11831 }
11832
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011833 // Add metadata which reprocess needs
11834 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11835 cam_reprocess_info_t *repro_info =
11836 (cam_reprocess_info_t *)frame_settings.find
11837 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011838 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011839 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011840 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011841 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011842 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011843 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011844 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011845 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011846 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011847 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011848 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011849 repro_info->pipeline_flip);
11850 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11851 repro_info->af_roi);
11852 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11853 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011854 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, then
11855 CAM_INTF_PARM_ROTATION metadata has already been added in
11856 translateToHalMetadata, and the HAL needs to keep this new rotation
11857 metadata. Otherwise, the old rotation info saved in the vendor tag
11858 would be used */
11859 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11860 CAM_INTF_PARM_ROTATION, reprocParam) {
11861 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11862 } else {
11863 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011864 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011865 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011866 }
11867
11868 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11869 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11870 roi.width and roi.height become the final JPEG size.
11871 For now, the HAL only checks this for reprocess requests */
11872 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11873 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11874 uint8_t *enable =
11875 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11876 if (*enable == TRUE) {
11877 int32_t *crop_data =
11878 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11879 cam_stream_crop_info_t crop_meta;
11880 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11881 crop_meta.stream_id = 0;
11882 crop_meta.crop.left = crop_data[0];
11883 crop_meta.crop.top = crop_data[1];
11884 crop_meta.crop.width = crop_data[2];
11885 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011886 // The JPEG crop ROI should match the CPP output size
11887 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11888 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11889 crop_meta.roi_map.left = 0;
11890 crop_meta.roi_map.top = 0;
11891 crop_meta.roi_map.width = cpp_crop->crop.width;
11892 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011893 }
11894 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11895 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011896 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011897 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011898 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11899 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011900 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011901 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11902
11903 // Add JPEG scale information
11904 cam_dimension_t scale_dim;
11905 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11906 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11907 int32_t *roi =
11908 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11909 scale_dim.width = roi[2];
11910 scale_dim.height = roi[3];
11911 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11912 scale_dim);
11913 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11914 scale_dim.width, scale_dim.height, mCameraId);
11915 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011916 }
11917 }
11918
11919 return rc;
11920}
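// Illustrative note (not part of the original HAL): the reprocess vendor tags consumed
// above are flat int32 arrays. QCAMERA3_CROP_REPROCESS and QCAMERA3_CROP_ROI_MAP_REPROCESS
// are read as {left, top, width, height}, QCAMERA3_JPEG_ENCODE_CROP_RECT uses the same
// {left, top, width, height} layout, and QCAMERA3_JPEG_ENCODE_CROP_ROI supplies the final
// JPEG width and height in roi[2] and roi[3].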
11921
11922/*===========================================================================
11923 * FUNCTION : saveRequestSettings
11924 *
11925 * DESCRIPTION: Add any settings that might have changed to the request settings
11926 * and save the settings to be applied on the frame
11927 *
11928 * PARAMETERS :
11929 * @jpegMetadata : the extracted and/or modified jpeg metadata
11930 * @request : request with initial settings
11931 *
11932 * RETURN :
11933 * camera_metadata_t* : pointer to the saved request settings
11934 *==========================================================================*/
11935camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11936 const CameraMetadata &jpegMetadata,
11937 camera3_capture_request_t *request)
11938{
11939 camera_metadata_t *resultMetadata;
11940 CameraMetadata camMetadata;
11941 camMetadata = request->settings;
11942
11943 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11944 int32_t thumbnail_size[2];
11945 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11946 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11947 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11948 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11949 }
11950
11951 if (request->input_buffer != NULL) {
11952 uint8_t reprocessFlags = 1;
11953 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11954 (uint8_t*)&reprocessFlags,
11955 sizeof(reprocessFlags));
11956 }
11957
11958 resultMetadata = camMetadata.release();
11959 return resultMetadata;
11960}
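// Illustrative note (not part of the original HAL): the buffer returned above comes from
// CameraMetadata::release(), so ownership of the underlying camera_metadata_t (the request
// settings plus the patched-in thumbnail size and reprocess flag) passes to the caller.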
11961
11962/*===========================================================================
11963 * FUNCTION : setHalFpsRange
11964 *
11965 * DESCRIPTION: set FPS range parameter
11966 *
11967 *
11968 * PARAMETERS :
11969 * @settings : Metadata from framework
11970 * @hal_metadata: Metadata buffer
11971 *
11972 *
11973 * RETURN : success: NO_ERROR
11974 * failure:
11975 *==========================================================================*/
11976int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11977 metadata_buffer_t *hal_metadata)
11978{
11979 int32_t rc = NO_ERROR;
11980 cam_fps_range_t fps_range;
11981 fps_range.min_fps = (float)
11982 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11983 fps_range.max_fps = (float)
11984 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11985 fps_range.video_min_fps = fps_range.min_fps;
11986 fps_range.video_max_fps = fps_range.max_fps;
11987
11988 LOGD("aeTargetFpsRange fps: [%f %f]",
11989 fps_range.min_fps, fps_range.max_fps);
11990 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11991 * follows:
11992 * ---------------------------------------------------------------|
11993 * Video stream is absent in configure_streams |
11994 * (Camcorder preview before the first video record |
11995 * ---------------------------------------------------------------|
11996 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11997 * | | | vid_min/max_fps|
11998 * ---------------------------------------------------------------|
11999 * NO | [ 30, 240] | 240 | [240, 240] |
12000 * |-------------|-------------|----------------|
12001 * | [240, 240] | 240 | [240, 240] |
12002 * ---------------------------------------------------------------|
12003 * Video stream is present in configure_streams |
12004 * ---------------------------------------------------------------|
12005 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12006 * | | | vid_min/max_fps|
12007 * ---------------------------------------------------------------|
12008 * NO | [ 30, 240] | 240 | [240, 240] |
12009 * (camcorder prev |-------------|-------------|----------------|
12010 * after video rec | [240, 240] | 240 | [240, 240] |
12011 * is stopped) | | | |
12012 * ---------------------------------------------------------------|
12013 * YES | [ 30, 240] | 240 | [240, 240] |
12014 * |-------------|-------------|----------------|
12015 * | [240, 240] | 240 | [240, 240] |
12016 * ---------------------------------------------------------------|
12017 * When Video stream is absent in configure_streams,
12018 * preview fps = sensor_fps / batchsize
12019 * Eg: for 240fps at batchSize 4, preview = 60fps
12020 * for 120fps at batchSize 4, preview = 30fps
12021 *
12022 * When video stream is present in configure_streams, preview fps is as per
12023 * the ratio of preview buffers to video buffers requested in process
12024 * capture request
12025 */
12026 mBatchSize = 0;
12027 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12028 fps_range.min_fps = fps_range.video_max_fps;
12029 fps_range.video_min_fps = fps_range.video_max_fps;
12030 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12031 fps_range.max_fps);
12032 if (NAME_NOT_FOUND != val) {
12033 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12034 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12035 return BAD_VALUE;
12036 }
12037
12038 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12039 /* If batchmode is currently in progress and the fps changes,
12040 * set the flag to restart the sensor */
12041 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12042 (mHFRVideoFps != fps_range.max_fps)) {
12043 mNeedSensorRestart = true;
12044 }
12045 mHFRVideoFps = fps_range.max_fps;
12046 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12047 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12048 mBatchSize = MAX_HFR_BATCH_SIZE;
12049 }
12050 }
12051 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12052
12053 }
12054 } else {
12055 /* HFR mode is a session parameter in the backend/ISP. It should be reset
12056 * when not in HFR mode */
12057 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12058 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12059 return BAD_VALUE;
12060 }
12061 }
12062 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12063 return BAD_VALUE;
12064 }
12065 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12066 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12067 return rc;
12068}
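// Worked example (illustrative, not part of the original HAL): for a
// CONSTRAINED_HIGH_SPEED_MODE session with aeTargetFpsRange [30, 240], the code above
// forces min_fps and video_min_fps to 240, looks up the matching HFR mode, and derives
// mBatchSize as mHFRVideoFps / PREVIEW_FPS_FOR_HFR clamped to MAX_HFR_BATCH_SIZE, setting
// mNeedSensorRestart if a batched session is already running at a different fps. In a
// regular session HFR is explicitly reset to CAM_HFR_MODE_OFF.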
12069
12070/*===========================================================================
12071 * FUNCTION : translateToHalMetadata
12072 *
12073 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12074 *
12075 *
12076 * PARAMETERS :
12077 * @request : request sent from framework
12078 *
12079 *
12080 * RETURN : success: NO_ERROR
12081 * failure:
12082 *==========================================================================*/
12083int QCamera3HardwareInterface::translateToHalMetadata
12084 (const camera3_capture_request_t *request,
12085 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012086 uint32_t snapshotStreamId) {
12087 if (request == nullptr || hal_metadata == nullptr) {
12088 return BAD_VALUE;
12089 }
12090
12091 int64_t minFrameDuration = getMinFrameDuration(request);
12092
12093 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12094 minFrameDuration);
12095}
12096
12097int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12098 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12099 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12100
Thierry Strudel3d639192016-09-09 11:52:26 -070012101 int rc = 0;
12102 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012103 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012104
12105 /* Do not change the order of the following list unless you know what you are
12106 * doing.
12107 * The order is laid out in such a way that parameters in the front of the table
12108 * may be used to override the parameters later in the table. Examples are:
12109 * 1. META_MODE should precede AEC/AWB/AF MODE
12110 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12111 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12112 * 4. Any mode should precede its corresponding settings
12113 */
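    /* Illustrative note (not part of the original HAL): every translation below follows the
     * same guarded pattern, so an additional tag would typically be wired up along these
     * lines (the tag and parameter names here are hypothetical):
     *
     *   if (frame_settings.exists(ANDROID_SOME_TAG)) {
     *       uint8_t value = frame_settings.find(ANDROID_SOME_TAG).data.u8[0];
     *       if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_SOME_PARM, value)) {
     *           rc = BAD_VALUE;
     *       }
     *   }
     */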
12114 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12115 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12116 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12117 rc = BAD_VALUE;
12118 }
12119 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12120 if (rc != NO_ERROR) {
12121 LOGE("extractSceneMode failed");
12122 }
12123 }
12124
12125 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12126 uint8_t fwk_aeMode =
12127 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12128 uint8_t aeMode;
12129 int32_t redeye;
12130
12131 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12132 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012133 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12134 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012135 } else {
12136 aeMode = CAM_AE_MODE_ON;
12137 }
12138 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12139 redeye = 1;
12140 } else {
12141 redeye = 0;
12142 }
12143
12144 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12145 fwk_aeMode);
12146 if (NAME_NOT_FOUND != val) {
12147 int32_t flashMode = (int32_t)val;
12148 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12149 }
12150
12151 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12153 rc = BAD_VALUE;
12154 }
12155 }
12156
12157 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12158 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12159 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12160 fwk_whiteLevel);
12161 if (NAME_NOT_FOUND != val) {
12162 uint8_t whiteLevel = (uint8_t)val;
12163 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12164 rc = BAD_VALUE;
12165 }
12166 }
12167 }
12168
12169 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12170 uint8_t fwk_cacMode =
12171 frame_settings.find(
12172 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12173 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12174 fwk_cacMode);
12175 if (NAME_NOT_FOUND != val) {
12176 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12177 bool entryAvailable = FALSE;
12178 // Check whether Frameworks set CAC mode is supported in device or not
12179 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12180 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12181 entryAvailable = TRUE;
12182 break;
12183 }
12184 }
12185 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12186 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
12187 // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
12188 // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
12189 if (entryAvailable == FALSE) {
12190 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12191 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12192 } else {
12193 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12194 // HIGH is not supported, so set FAST since the spec says the underlying
12195 // device implementation can be the same for both modes.
12196 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12197 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12198 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12199 // in order to avoid the fps drop due to high quality
12200 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12201 } else {
12202 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12203 }
12204 }
12205 }
12206 LOGD("Final cacMode is %d", cacMode);
12207 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12208 rc = BAD_VALUE;
12209 }
12210 } else {
12211 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12212 }
12213 }
12214
Jason Lee84ae9972017-02-24 13:24:24 -080012215 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012216 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012217 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012218 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012219 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12220 fwk_focusMode);
12221 if (NAME_NOT_FOUND != val) {
12222 uint8_t focusMode = (uint8_t)val;
12223 LOGD("set focus mode %d", focusMode);
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12225 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12226 rc = BAD_VALUE;
12227 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012228 }
12229 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012230 } else {
12231 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12232 LOGE("Focus forced to infinity %d", focusMode);
12233 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12234 rc = BAD_VALUE;
12235 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012236 }
12237
Jason Lee84ae9972017-02-24 13:24:24 -080012238 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12239 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012240 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12242 focalDistance)) {
12243 rc = BAD_VALUE;
12244 }
12245 }
12246
12247 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12248 uint8_t fwk_antibandingMode =
12249 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12250 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12251 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12252 if (NAME_NOT_FOUND != val) {
12253 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012254 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12255 if (m60HzZone) {
12256 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12257 } else {
12258 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12259 }
12260 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012261 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12262 hal_antibandingMode)) {
12263 rc = BAD_VALUE;
12264 }
12265 }
12266 }
12267
12268 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12269 int32_t expCompensation = frame_settings.find(
12270 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12271 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12272 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12273 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12274 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012275 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012276 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12277 expCompensation)) {
12278 rc = BAD_VALUE;
12279 }
12280 }
12281
12282 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12283 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12284 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12285 rc = BAD_VALUE;
12286 }
12287 }
12288 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12289 rc = setHalFpsRange(frame_settings, hal_metadata);
12290 if (rc != NO_ERROR) {
12291 LOGE("setHalFpsRange failed");
12292 }
12293 }
12294
12295 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12296 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12298 rc = BAD_VALUE;
12299 }
12300 }
12301
12302 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12303 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12304 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12305 fwk_effectMode);
12306 if (NAME_NOT_FOUND != val) {
12307 uint8_t effectMode = (uint8_t)val;
12308 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12309 rc = BAD_VALUE;
12310 }
12311 }
12312 }
12313
12314 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12315 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12317 colorCorrectMode)) {
12318 rc = BAD_VALUE;
12319 }
12320 }
12321
12322 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12323 cam_color_correct_gains_t colorCorrectGains;
12324 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12325 colorCorrectGains.gains[i] =
12326 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12327 }
12328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12329 colorCorrectGains)) {
12330 rc = BAD_VALUE;
12331 }
12332 }
12333
12334 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12335 cam_color_correct_matrix_t colorCorrectTransform;
12336 cam_rational_type_t transform_elem;
12337 size_t num = 0;
12338 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12339 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12340 transform_elem.numerator =
12341 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12342 transform_elem.denominator =
12343 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12344 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12345 num++;
12346 }
12347 }
12348 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12349 colorCorrectTransform)) {
12350 rc = BAD_VALUE;
12351 }
12352 }
12353
12354 cam_trigger_t aecTrigger;
12355 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12356 aecTrigger.trigger_id = -1;
12357 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12358 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12359 aecTrigger.trigger =
12360 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12361 aecTrigger.trigger_id =
12362 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12364 aecTrigger)) {
12365 rc = BAD_VALUE;
12366 }
12367 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12368 aecTrigger.trigger, aecTrigger.trigger_id);
12369 }
12370
12371 /*af_trigger must come with a trigger id*/
12372 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12373 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12374 cam_trigger_t af_trigger;
12375 af_trigger.trigger =
12376 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12377 af_trigger.trigger_id =
12378 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12379 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12380 rc = BAD_VALUE;
12381 }
12382 LOGD("AfTrigger: %d AfTriggerID: %d",
12383 af_trigger.trigger, af_trigger.trigger_id);
12384 }
12385
12386 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12387 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12388 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12389 rc = BAD_VALUE;
12390 }
12391 }
12392 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12393 cam_edge_application_t edge_application;
12394 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012395
Thierry Strudel3d639192016-09-09 11:52:26 -070012396 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12397 edge_application.sharpness = 0;
12398 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012399 edge_application.sharpness =
12400 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12401 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12402 int32_t sharpness =
12403 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12404 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12405 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12406 LOGD("Setting edge mode sharpness %d", sharpness);
12407 edge_application.sharpness = sharpness;
12408 }
12409 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012410 }
12411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12412 rc = BAD_VALUE;
12413 }
12414 }
12415
12416 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12417 int32_t respectFlashMode = 1;
12418 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12419 uint8_t fwk_aeMode =
12420 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012421 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12422 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12423 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012424 respectFlashMode = 0;
12425 LOGH("AE Mode controls flash, ignore android.flash.mode");
12426 }
12427 }
12428 if (respectFlashMode) {
12429 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12430 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12431 LOGH("flash mode after mapping %d", val);
12432 // To check: CAM_INTF_META_FLASH_MODE usage
12433 if (NAME_NOT_FOUND != val) {
12434 uint8_t flashMode = (uint8_t)val;
12435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12436 rc = BAD_VALUE;
12437 }
12438 }
12439 }
12440 }
12441
12442 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12443 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12444 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12445 rc = BAD_VALUE;
12446 }
12447 }
12448
12449 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12450 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12451 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12452 flashFiringTime)) {
12453 rc = BAD_VALUE;
12454 }
12455 }
12456
12457 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12458 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12459 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12460 hotPixelMode)) {
12461 rc = BAD_VALUE;
12462 }
12463 }
12464
12465 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12466 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12467 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12468 lensAperture)) {
12469 rc = BAD_VALUE;
12470 }
12471 }
12472
12473 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12474 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12476 filterDensity)) {
12477 rc = BAD_VALUE;
12478 }
12479 }
12480
12481 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12482 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12483 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12484 focalLength)) {
12485 rc = BAD_VALUE;
12486 }
12487 }
12488
12489 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12490 uint8_t optStabMode =
12491 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12492 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12493 optStabMode)) {
12494 rc = BAD_VALUE;
12495 }
12496 }
12497
12498 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12499 uint8_t videoStabMode =
12500 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12501 LOGD("videoStabMode from APP = %d", videoStabMode);
12502 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12503 videoStabMode)) {
12504 rc = BAD_VALUE;
12505 }
12506 }
12507
12508
12509 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12510 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12512 noiseRedMode)) {
12513 rc = BAD_VALUE;
12514 }
12515 }
12516
12517 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12518 float reprocessEffectiveExposureFactor =
12519 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12520 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12521 reprocessEffectiveExposureFactor)) {
12522 rc = BAD_VALUE;
12523 }
12524 }
12525
12526 cam_crop_region_t scalerCropRegion;
12527 bool scalerCropSet = false;
12528 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12529 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12530 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12531 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12532 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12533
12534 // Map coordinate system from active array to sensor output.
12535 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12536 scalerCropRegion.width, scalerCropRegion.height);
12537
12538 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12539 scalerCropRegion)) {
12540 rc = BAD_VALUE;
12541 }
12542 scalerCropSet = true;
12543 }
12544
12545 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12546 int64_t sensorExpTime =
12547 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12548 LOGD("setting sensorExpTime %lld", sensorExpTime);
12549 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12550 sensorExpTime)) {
12551 rc = BAD_VALUE;
12552 }
12553 }
12554
12555 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12556 int64_t sensorFrameDuration =
12557 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012558 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12559 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12560 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12561 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12562 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12563 sensorFrameDuration)) {
12564 rc = BAD_VALUE;
12565 }
12566 }
12567
12568 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12569 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12570 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12571 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12572 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12573 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12574 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12576 sensorSensitivity)) {
12577 rc = BAD_VALUE;
12578 }
12579 }
12580
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012581#ifndef USE_HAL_3_3
12582 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12583 int32_t ispSensitivity =
12584 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12585 if (ispSensitivity <
12586 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12587 ispSensitivity =
12588 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12589 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12590 }
12591 if (ispSensitivity >
12592 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12593 ispSensitivity =
12594 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12595 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12596 }
12597 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12598 ispSensitivity)) {
12599 rc = BAD_VALUE;
12600 }
12601 }
12602#endif
12603
Thierry Strudel3d639192016-09-09 11:52:26 -070012604 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12605 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12606 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12607 rc = BAD_VALUE;
12608 }
12609 }
12610
12611 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12612 uint8_t fwk_facedetectMode =
12613 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12614
12615 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12616 fwk_facedetectMode);
12617
12618 if (NAME_NOT_FOUND != val) {
12619 uint8_t facedetectMode = (uint8_t)val;
12620 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12621 facedetectMode)) {
12622 rc = BAD_VALUE;
12623 }
12624 }
12625 }
12626
Thierry Strudel54dc9782017-02-15 12:12:10 -080012627 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012628 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012629 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12631 histogramMode)) {
12632 rc = BAD_VALUE;
12633 }
12634 }
12635
12636 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12637 uint8_t sharpnessMapMode =
12638 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12639 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12640 sharpnessMapMode)) {
12641 rc = BAD_VALUE;
12642 }
12643 }
12644
12645 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12646 uint8_t tonemapMode =
12647 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12648 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12649 rc = BAD_VALUE;
12650 }
12651 }
12652 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12653 /*All tonemap channels will have the same number of points*/
12654 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12655 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12656 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12657 cam_rgb_tonemap_curves tonemapCurves;
12658 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12659 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12660 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12661 tonemapCurves.tonemap_points_cnt,
12662 CAM_MAX_TONEMAP_CURVE_SIZE);
12663 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12664 }
12665
12666 /* ch0 = G*/
12667 size_t point = 0;
12668 cam_tonemap_curve_t tonemapCurveGreen;
12669 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12670 for (size_t j = 0; j < 2; j++) {
12671 tonemapCurveGreen.tonemap_points[i][j] =
12672 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12673 point++;
12674 }
12675 }
12676 tonemapCurves.curves[0] = tonemapCurveGreen;
12677
12678 /* ch 1 = B */
12679 point = 0;
12680 cam_tonemap_curve_t tonemapCurveBlue;
12681 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12682 for (size_t j = 0; j < 2; j++) {
12683 tonemapCurveBlue.tonemap_points[i][j] =
12684 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12685 point++;
12686 }
12687 }
12688 tonemapCurves.curves[1] = tonemapCurveBlue;
12689
12690 /* ch 2 = R */
12691 point = 0;
12692 cam_tonemap_curve_t tonemapCurveRed;
12693 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12694 for (size_t j = 0; j < 2; j++) {
12695 tonemapCurveRed.tonemap_points[i][j] =
12696 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12697 point++;
12698 }
12699 }
12700 tonemapCurves.curves[2] = tonemapCurveRed;
12701
12702 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12703 tonemapCurves)) {
12704 rc = BAD_VALUE;
12705 }
12706 }
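    // Illustrative note (not part of the original HAL): each framework tonemap curve is a
    // flat float array of (Pin, Pout) pairs, which is why the loops above read two floats
    // per point and set tonemap_points_cnt to count / 2. For example, a linear curve
    // {0.0, 0.0, 1.0, 1.0} yields two points per channel.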
12707
12708 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12709 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12710 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12711 captureIntent)) {
12712 rc = BAD_VALUE;
12713 }
12714 }
12715
12716 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12717 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12718 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12719 blackLevelLock)) {
12720 rc = BAD_VALUE;
12721 }
12722 }
12723
12724 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12725 uint8_t lensShadingMapMode =
12726 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12727 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12728 lensShadingMapMode)) {
12729 rc = BAD_VALUE;
12730 }
12731 }
12732
12733 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12734 cam_area_t roi;
12735 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012736 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012737
12738 // Map coordinate system from active array to sensor output.
12739 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12740 roi.rect.height);
12741
12742 if (scalerCropSet) {
12743 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12744 }
12745 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12746 rc = BAD_VALUE;
12747 }
12748 }
12749
12750 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12751 cam_area_t roi;
12752 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012753 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012754
12755 // Map coordinate system from active array to sensor output.
12756 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12757 roi.rect.height);
12758
12759 if (scalerCropSet) {
12760 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12761 }
12762 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12763 rc = BAD_VALUE;
12764 }
12765 }
12766
12767 // CDS for non-HFR non-video mode
12768 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12769 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12770 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12771 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12772 LOGE("Invalid CDS mode %d!", *fwk_cds);
12773 } else {
12774 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12775 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12776 rc = BAD_VALUE;
12777 }
12778 }
12779 }
12780
Thierry Strudel04e026f2016-10-10 11:27:36 -070012781 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012782 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012783 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012784 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12785 }
12786 if (m_bVideoHdrEnabled)
12787 vhdr = CAM_VIDEO_HDR_MODE_ON;
12788
Thierry Strudel54dc9782017-02-15 12:12:10 -080012789 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12790
12791 if(vhdr != curr_hdr_state)
12792 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12793
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012794 rc = setVideoHdrMode(mParameters, vhdr);
12795 if (rc != NO_ERROR) {
12796 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012797 }
12798
12799 //IR
12800 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12801 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12802 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012803 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12804 uint8_t isIRon = 0;
12805
12806            isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012807 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12808 LOGE("Invalid IR mode %d!", fwk_ir);
12809 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012810 if(isIRon != curr_ir_state )
12811 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12812
Thierry Strudel04e026f2016-10-10 11:27:36 -070012813 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12814 CAM_INTF_META_IR_MODE, fwk_ir)) {
12815 rc = BAD_VALUE;
12816 }
12817 }
12818 }
12819
Thierry Strudel54dc9782017-02-15 12:12:10 -080012820 //Binning Correction Mode
12821 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12822 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12823 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12824 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12825 || (0 > fwk_binning_correction)) {
12826 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12827 } else {
12828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12829 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12830 rc = BAD_VALUE;
12831 }
12832 }
12833 }
12834
Thierry Strudel269c81a2016-10-12 12:13:59 -070012835 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12836 float aec_speed;
12837 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12838 LOGD("AEC Speed :%f", aec_speed);
12839 if ( aec_speed < 0 ) {
12840 LOGE("Invalid AEC mode %f!", aec_speed);
12841 } else {
12842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12843 aec_speed)) {
12844 rc = BAD_VALUE;
12845 }
12846 }
12847 }
12848
12849 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12850 float awb_speed;
12851 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12852 LOGD("AWB Speed :%f", awb_speed);
12853 if ( awb_speed < 0 ) {
12854 LOGE("Invalid AWB mode %f!", awb_speed);
12855 } else {
12856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12857 awb_speed)) {
12858 rc = BAD_VALUE;
12859 }
12860 }
12861 }
12862
Thierry Strudel3d639192016-09-09 11:52:26 -070012863 // TNR
12864 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12865 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12866 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012867 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012868 cam_denoise_param_t tnr;
12869 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12870 tnr.process_plates =
12871 (cam_denoise_process_type_t)frame_settings.find(
12872 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12873 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012874
12875 if(b_TnrRequested != curr_tnr_state)
12876 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12877
Thierry Strudel3d639192016-09-09 11:52:26 -070012878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12879 rc = BAD_VALUE;
12880 }
12881 }
12882
Thierry Strudel54dc9782017-02-15 12:12:10 -080012883 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012884 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012885 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12887 *exposure_metering_mode)) {
12888 rc = BAD_VALUE;
12889 }
12890 }
12891
Thierry Strudel3d639192016-09-09 11:52:26 -070012892 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12893 int32_t fwk_testPatternMode =
12894 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12895 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12896 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12897
12898 if (NAME_NOT_FOUND != testPatternMode) {
12899 cam_test_pattern_data_t testPatternData;
12900 memset(&testPatternData, 0, sizeof(testPatternData));
12901 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12902 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12903 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12904 int32_t *fwk_testPatternData =
12905 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12906 testPatternData.r = fwk_testPatternData[0];
12907 testPatternData.b = fwk_testPatternData[3];
12908 switch (gCamCapability[mCameraId]->color_arrangement) {
12909 case CAM_FILTER_ARRANGEMENT_RGGB:
12910 case CAM_FILTER_ARRANGEMENT_GRBG:
12911 testPatternData.gr = fwk_testPatternData[1];
12912 testPatternData.gb = fwk_testPatternData[2];
12913 break;
12914 case CAM_FILTER_ARRANGEMENT_GBRG:
12915 case CAM_FILTER_ARRANGEMENT_BGGR:
12916 testPatternData.gr = fwk_testPatternData[2];
12917 testPatternData.gb = fwk_testPatternData[1];
12918 break;
12919 default:
12920 LOGE("color arrangement %d is not supported",
12921 gCamCapability[mCameraId]->color_arrangement);
12922 break;
12923 }
12924 }
12925 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12926 testPatternData)) {
12927 rc = BAD_VALUE;
12928 }
12929 } else {
12930 LOGE("Invalid framework sensor test pattern mode %d",
12931 fwk_testPatternMode);
12932 }
12933 }
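    /*
     * Illustrative example (the channel ordering is an assumption, not stated
     * in this code): for CAM_TEST_PATTERN_SOLID_COLOR the framework supplies
     * ANDROID_SENSOR_TEST_PATTERN_DATA as four per-channel values, nominally
     * [R, Geven, Godd, B]; a solid red frame on a 10-bit RGGB sensor could
     * therefore be requested with {1023, 0, 0, 0}. The switch above swaps the
     * gr/gb assignment for GBRG/BGGR color filter arrangements so the green
     * values still land on the correct rows.
     */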
12934
12935 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12936 size_t count = 0;
12937 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12938 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12939 gps_coords.data.d, gps_coords.count, count);
12940 if (gps_coords.count != count) {
12941 rc = BAD_VALUE;
12942 }
12943 }
12944
12945 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12946 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12947 size_t count = 0;
12948 const char *gps_methods_src = (const char *)
12949 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12950 memset(gps_methods, '\0', sizeof(gps_methods));
12951 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12952 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12953 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12954 if (GPS_PROCESSING_METHOD_SIZE != count) {
12955 rc = BAD_VALUE;
12956 }
12957 }
12958
12959 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12960 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12962 gps_timestamp)) {
12963 rc = BAD_VALUE;
12964 }
12965 }
12966
12967 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12968 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12969 cam_rotation_info_t rotation_info;
12970 if (orientation == 0) {
12971 rotation_info.rotation = ROTATE_0;
12972 } else if (orientation == 90) {
12973 rotation_info.rotation = ROTATE_90;
12974 } else if (orientation == 180) {
12975 rotation_info.rotation = ROTATE_180;
12976 } else if (orientation == 270) {
12977 rotation_info.rotation = ROTATE_270;
12978 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012979 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012980 rotation_info.streamId = snapshotStreamId;
12981 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12982 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12983 rc = BAD_VALUE;
12984 }
12985 }
12986
12987 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12988 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12989 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12990 rc = BAD_VALUE;
12991 }
12992 }
12993
12994 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12995 uint32_t thumb_quality = (uint32_t)
12996 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12997 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12998 thumb_quality)) {
12999 rc = BAD_VALUE;
13000 }
13001 }
13002
13003 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13004 cam_dimension_t dim;
13005 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13006 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13008 rc = BAD_VALUE;
13009 }
13010 }
13011
13012 // Internal metadata
13013 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13014 size_t count = 0;
13015 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13016 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13017 privatedata.data.i32, privatedata.count, count);
13018 if (privatedata.count != count) {
13019 rc = BAD_VALUE;
13020 }
13021 }
13022
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013023 // ISO/Exposure Priority
13024 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13025 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13026 cam_priority_mode_t mode =
13027 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13028 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13029 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13030 use_iso_exp_pty.previewOnly = FALSE;
13031 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13032 use_iso_exp_pty.value = *ptr;
13033
13034 if(CAM_ISO_PRIORITY == mode) {
13035 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13036 use_iso_exp_pty)) {
13037 rc = BAD_VALUE;
13038 }
13039 }
13040 else {
13041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13042 use_iso_exp_pty)) {
13043 rc = BAD_VALUE;
13044 }
13045 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013046
13047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13048 rc = BAD_VALUE;
13049 }
13050 }
13051 } else {
13052 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13053 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013054 }
13055 }
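    /*
     * Usage sketch (illustrative values, not from the original code): an app
     * requesting ISO-priority exposure would set
     * QCAMERA3_SELECT_PRIORITY = CAM_ISO_PRIORITY and
     * QCAMERA3_USE_ISO_EXP_PRIORITY = 800 (as an int64 value) in the capture
     * request; the block above then forwards the value via CAM_INTF_PARM_ISO
     * and turns ZSL mode on. If either vendor key is absent, ZSL mode is
     * explicitly reset to 0.
     */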
13056
13057 // Saturation
13058 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13059 int32_t* use_saturation =
13060 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13062 rc = BAD_VALUE;
13063 }
13064 }
13065
Thierry Strudel3d639192016-09-09 11:52:26 -070013066 // EV step
13067 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13068 gCamCapability[mCameraId]->exp_compensation_step)) {
13069 rc = BAD_VALUE;
13070 }
13071
13072 // CDS info
13073 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13074 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13075 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13076
13077 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13078 CAM_INTF_META_CDS_DATA, *cdsData)) {
13079 rc = BAD_VALUE;
13080 }
13081 }
13082
Shuzhen Wang19463d72016-03-08 11:09:52 -080013083 // Hybrid AE
13084 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13085 uint8_t *hybrid_ae = (uint8_t *)
13086 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13087
13088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13089 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13090 rc = BAD_VALUE;
13091 }
13092 }
13093
Shuzhen Wang14415f52016-11-16 18:26:18 -080013094 // Histogram
13095 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13096 uint8_t histogramMode =
13097 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13099 histogramMode)) {
13100 rc = BAD_VALUE;
13101 }
13102 }
13103
13104 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13105 int32_t histogramBins =
13106 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13108 histogramBins)) {
13109 rc = BAD_VALUE;
13110 }
13111 }
13112
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013113 // Tracking AF
13114 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13115 uint8_t trackingAfTrigger =
13116 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13117 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13118 trackingAfTrigger)) {
13119 rc = BAD_VALUE;
13120 }
13121 }
13122
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013123 // Makernote
13124 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13125 if (entry.count != 0) {
13126 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13127 cam_makernote_t makernote;
13128 makernote.length = entry.count;
13129 memcpy(makernote.data, entry.data.u8, makernote.length);
13130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13131 rc = BAD_VALUE;
13132 }
13133 } else {
13134 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13135 MAX_MAKERNOTE_LENGTH);
13136 rc = BAD_VALUE;
13137 }
13138 }
13139
Thierry Strudel3d639192016-09-09 11:52:26 -070013140 return rc;
13141}
13142
13143/*===========================================================================
13144 * FUNCTION : captureResultCb
13145 *
13146 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13147 *
13148 * PARAMETERS :
13149 * @frame : frame information from mm-camera-interface
13150 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13151 * @userdata: userdata
13152 *
13153 * RETURN : NONE
13154 *==========================================================================*/
13155void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13156 camera3_stream_buffer_t *buffer,
13157 uint32_t frame_number, bool isInputBuffer, void *userdata)
13158{
13159 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13160 if (hw == NULL) {
13161 LOGE("Invalid hw %p", hw);
13162 return;
13163 }
13164
13165 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13166 return;
13167}
13168
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013169/*===========================================================================
13170 * FUNCTION : setBufferErrorStatus
13171 *
13172 * DESCRIPTION: Callback handler for channels to report any buffer errors
13173 *
13174 * PARAMETERS :
13175 * @ch : Channel on which buffer error is reported from
13176 * @frame_number : frame number on which buffer error is reported on
13177 * @buffer_status : buffer error status
13178 * @userdata: userdata
13179 *
13180 * RETURN : NONE
13181 *==========================================================================*/
13182void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13183 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13184{
13185 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13186 if (hw == NULL) {
13187 LOGE("Invalid hw %p", hw);
13188 return;
13189 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013190
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013191 hw->setBufferErrorStatus(ch, frame_number, err);
13192 return;
13193}
13194
13195void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13196 uint32_t frameNumber, camera3_buffer_status_t err)
13197{
13198 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13199 pthread_mutex_lock(&mMutex);
13200
13201 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13202 if (req.frame_number != frameNumber)
13203 continue;
13204 for (auto& k : req.mPendingBufferList) {
13205 if(k.stream->priv == ch) {
13206 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13207 }
13208 }
13209 }
13210
13211 pthread_mutex_unlock(&mMutex);
13212 return;
13213}
Thierry Strudel3d639192016-09-09 11:52:26 -070013214/*===========================================================================
13215 * FUNCTION : initialize
13216 *
13217 * DESCRIPTION: Pass framework callback pointers to HAL
13218 *
13219 * PARAMETERS :
13220 *
13221 *
13222 * RETURN : Success : 0
13223 * Failure: -ENODEV
13224 *==========================================================================*/
13225
13226int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13227 const camera3_callback_ops_t *callback_ops)
13228{
13229 LOGD("E");
13230 QCamera3HardwareInterface *hw =
13231 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13232 if (!hw) {
13233 LOGE("NULL camera device");
13234 return -ENODEV;
13235 }
13236
13237 int rc = hw->initialize(callback_ops);
13238 LOGD("X");
13239 return rc;
13240}
13241
13242/*===========================================================================
13243 * FUNCTION : configure_streams
13244 *
13245 * DESCRIPTION: Configure the set of output streams requested by the framework
13246 *
13247 * PARAMETERS :
13248 *
13249 *
13250 * RETURN : Success: 0
13251 * Failure: -EINVAL (if stream configuration is invalid)
13252 * -ENODEV (fatal error)
13253 *==========================================================================*/
13254
13255int QCamera3HardwareInterface::configure_streams(
13256 const struct camera3_device *device,
13257 camera3_stream_configuration_t *stream_list)
13258{
13259 LOGD("E");
13260 QCamera3HardwareInterface *hw =
13261 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13262 if (!hw) {
13263 LOGE("NULL camera device");
13264 return -ENODEV;
13265 }
13266 int rc = hw->configureStreams(stream_list);
13267 LOGD("X");
13268 return rc;
13269}
13270
13271/*===========================================================================
13272 * FUNCTION : construct_default_request_settings
13273 *
13274 * DESCRIPTION: Configure a settings buffer to meet the required use case
13275 *
13276 * PARAMETERS :
13277 *
13278 *
13279 * RETURN : Success: Return valid metadata
13280 * Failure: Return NULL
13281 *==========================================================================*/
13282const camera_metadata_t* QCamera3HardwareInterface::
13283 construct_default_request_settings(const struct camera3_device *device,
13284 int type)
13285{
13286
13287 LOGD("E");
13288 camera_metadata_t* fwk_metadata = NULL;
13289 QCamera3HardwareInterface *hw =
13290 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13291 if (!hw) {
13292 LOGE("NULL camera device");
13293 return NULL;
13294 }
13295
13296 fwk_metadata = hw->translateCapabilityToMetadata(type);
13297
13298 LOGD("X");
13299 return fwk_metadata;
13300}
13301
13302/*===========================================================================
13303 * FUNCTION : process_capture_request
13304 *
13305 * DESCRIPTION: Entry point for a framework capture request; forwards to orchestrateRequest
13306 *
13307 * PARAMETERS :
13308 *
13309 *
13310 * RETURN :
13311 *==========================================================================*/
13312int QCamera3HardwareInterface::process_capture_request(
13313 const struct camera3_device *device,
13314 camera3_capture_request_t *request)
13315{
13316 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013317 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013318 QCamera3HardwareInterface *hw =
13319 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13320 if (!hw) {
13321 LOGE("NULL camera device");
13322 return -EINVAL;
13323 }
13324
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013325 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013326 LOGD("X");
13327 return rc;
13328}
13329
13330/*===========================================================================
13331 * FUNCTION : dump
13332 *
13333 * DESCRIPTION: Dump HAL state to the given file descriptor (invoked by dumpsys)
13334 *
13335 * PARAMETERS :
13336 *
13337 *
13338 * RETURN :
13339 *==========================================================================*/
13340
13341void QCamera3HardwareInterface::dump(
13342 const struct camera3_device *device, int fd)
13343{
13344 /* Log level property is read when "adb shell dumpsys media.camera" is
13345 called so that the log level can be controlled without restarting
13346 the media server */
13347 getLogLevel();
13348
13349 LOGD("E");
13350 QCamera3HardwareInterface *hw =
13351 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13352 if (!hw) {
13353 LOGE("NULL camera device");
13354 return;
13355 }
13356
13357 hw->dump(fd);
13358 LOGD("X");
13359 return;
13360}
13361
13362/*===========================================================================
13363 * FUNCTION : flush
13364 *
13365 * DESCRIPTION: Flush all in-flight captures and return pending buffers/results
13366 *
13367 * PARAMETERS :
13368 *
13369 *
13370 * RETURN :
13371 *==========================================================================*/
13372
13373int QCamera3HardwareInterface::flush(
13374 const struct camera3_device *device)
13375{
13376 int rc;
13377 LOGD("E");
13378 QCamera3HardwareInterface *hw =
13379 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13380 if (!hw) {
13381 LOGE("NULL camera device");
13382 return -EINVAL;
13383 }
13384
13385 pthread_mutex_lock(&hw->mMutex);
13386 // Validate current state
13387 switch (hw->mState) {
13388 case STARTED:
13389 /* valid state */
13390 break;
13391
13392 case ERROR:
13393 pthread_mutex_unlock(&hw->mMutex);
13394 hw->handleCameraDeviceError();
13395 return -ENODEV;
13396
13397 default:
13398 LOGI("Flush returned during state %d", hw->mState);
13399 pthread_mutex_unlock(&hw->mMutex);
13400 return 0;
13401 }
13402 pthread_mutex_unlock(&hw->mMutex);
13403
13404 rc = hw->flush(true /* restart channels */ );
13405 LOGD("X");
13406 return rc;
13407}
13408
13409/*===========================================================================
13410 * FUNCTION : close_camera_device
13411 *
13412 * DESCRIPTION: Close the camera device and free the HAL instance
13413 *
13414 * PARAMETERS :
13415 *
13416 *
13417 * RETURN :
13418 *==========================================================================*/
13419int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13420{
13421 int ret = NO_ERROR;
13422 QCamera3HardwareInterface *hw =
13423 reinterpret_cast<QCamera3HardwareInterface *>(
13424 reinterpret_cast<camera3_device_t *>(device)->priv);
13425 if (!hw) {
13426 LOGE("NULL camera device");
13427 return BAD_VALUE;
13428 }
13429
13430 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13431 delete hw;
13432 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013433 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013434 return ret;
13435}
13436
13437/*===========================================================================
13438 * FUNCTION : getWaveletDenoiseProcessPlate
13439 *
13440 * DESCRIPTION: query wavelet denoise process plate
13441 *
13442 * PARAMETERS : None
13443 *
13444 * RETURN : WNR process plate value
13445 *==========================================================================*/
13446cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13447{
13448 char prop[PROPERTY_VALUE_MAX];
13449 memset(prop, 0, sizeof(prop));
13450 property_get("persist.denoise.process.plates", prop, "0");
13451 int processPlate = atoi(prop);
13452 switch(processPlate) {
13453 case 0:
13454 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13455 case 1:
13456 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13457 case 2:
13458 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13459 case 3:
13460 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13461 default:
13462 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13463 }
13464}
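/*
 * Illustrative example (the property value shown is an assumption about
 * typical usage): the plate selection can be overridden at runtime through
 * the property read above, e.g.
 *     adb shell setprop persist.denoise.process.plates 2
 * makes this function return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; any
 * unrecognized value falls back to the same streamlined YCbCr default.
 * getTemporalDenoiseProcessPlate() below applies the identical mapping to
 * persist.tnr.process.plates.
 */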
13465
13466
13467/*===========================================================================
13468 * FUNCTION : getTemporalDenoiseProcessPlate
13469 *
13470 * DESCRIPTION: query temporal denoise process plate
13471 *
13472 * PARAMETERS : None
13473 *
13474 * RETURN : TNR process plate value
13475 *==========================================================================*/
13476cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13477{
13478 char prop[PROPERTY_VALUE_MAX];
13479 memset(prop, 0, sizeof(prop));
13480 property_get("persist.tnr.process.plates", prop, "0");
13481 int processPlate = atoi(prop);
13482 switch(processPlate) {
13483 case 0:
13484 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13485 case 1:
13486 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13487 case 2:
13488 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13489 case 3:
13490 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13491 default:
13492 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13493 }
13494}
13495
13496
13497/*===========================================================================
13498 * FUNCTION : extractSceneMode
13499 *
13500 * DESCRIPTION: Extract scene mode from frameworks set metadata
13501 *
13502 * PARAMETERS :
13503 * @frame_settings: CameraMetadata reference
13504 * @metaMode: ANDROID_CONTROL_MODE
13505 * @hal_metadata: hal metadata structure
13506 *
13507 * RETURN : int32_t type of status (NO_ERROR on success)
13508 *==========================================================================*/
13509int32_t QCamera3HardwareInterface::extractSceneMode(
13510 const CameraMetadata &frame_settings, uint8_t metaMode,
13511 metadata_buffer_t *hal_metadata)
13512{
13513 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013514 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13515
13516 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13517 LOGD("Ignoring control mode OFF_KEEP_STATE");
13518 return NO_ERROR;
13519 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013520
13521 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13522 camera_metadata_ro_entry entry =
13523 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13524 if (0 == entry.count)
13525 return rc;
13526
13527 uint8_t fwk_sceneMode = entry.data.u8[0];
13528
13529 int val = lookupHalName(SCENE_MODES_MAP,
13530 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13531 fwk_sceneMode);
13532 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013533 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013534 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013535 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013536 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013537
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013538 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13539 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13540 }
13541
13542 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13543 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013544 cam_hdr_param_t hdr_params;
13545 hdr_params.hdr_enable = 1;
13546 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13547 hdr_params.hdr_need_1x = false;
13548 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13549 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13550 rc = BAD_VALUE;
13551 }
13552 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013553
Thierry Strudel3d639192016-09-09 11:52:26 -070013554 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13555 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13556 rc = BAD_VALUE;
13557 }
13558 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013559
13560 if (mForceHdrSnapshot) {
13561 cam_hdr_param_t hdr_params;
13562 hdr_params.hdr_enable = 1;
13563 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13564 hdr_params.hdr_need_1x = false;
13565 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13566 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13567 rc = BAD_VALUE;
13568 }
13569 }
13570
Thierry Strudel3d639192016-09-09 11:52:26 -070013571 return rc;
13572}
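/*
 * Illustrative summary (descriptive only): when the framework sends
 * ANDROID_CONTROL_MODE = USE_SCENE_MODE, the lookup above maps
 * ANDROID_CONTROL_SCENE_MODE to the HAL bestshot mode and programs it via
 * CAM_INTF_PARM_BESTSHOT_MODE (unless sensor HDR is active). HDR requests,
 * as well as mForceHdrSnapshot, additionally program
 * CAM_INTF_PARM_HAL_BRACKETING_HDR with hdr_enable = 1 and
 * CAM_HDR_MODE_MULTIFRAME, i.e. multi-frame bracketed HDR handled in
 * post-processing rather than on the sensor.
 */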
13573
13574/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013575 * FUNCTION : setVideoHdrMode
13576 *
13577 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13578 *
13579 * PARAMETERS :
13580 * @hal_metadata: hal metadata structure
13581 * @vhdr : video HDR mode (QCAMERA3_VIDEO_HDR_MODE) to apply
13582 *
13583 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on invalid mode)
13584 *==========================================================================*/
13585int32_t QCamera3HardwareInterface::setVideoHdrMode(
13586 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13587{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013588 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13589 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13590 }
13591
13592 LOGE("Invalid Video HDR mode %d!", vhdr);
13593 return BAD_VALUE;
13594}
13595
13596/*===========================================================================
13597 * FUNCTION : setSensorHDR
13598 *
13599 * DESCRIPTION: Enable/disable sensor HDR.
13600 *
13601 * PARAMETERS :
13602 * @hal_metadata: hal metadata structure
13603 * @enable: boolean whether to enable/disable sensor HDR
13604 *
13605 * RETURN : int32_t type of status (NO_ERROR on success)
13606 *==========================================================================*/
13607int32_t QCamera3HardwareInterface::setSensorHDR(
13608 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13609{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013610 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013611 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13612
13613 if (enable) {
13614 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13615 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13616 #ifdef _LE_CAMERA_
13617 //Default to staggered HDR for IOT
13618 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13619 #else
13620 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13621 #endif
13622 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13623 }
13624
13625 bool isSupported = false;
13626 switch (sensor_hdr) {
13627 case CAM_SENSOR_HDR_IN_SENSOR:
13628 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13629 CAM_QCOM_FEATURE_SENSOR_HDR) {
13630 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013631 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013632 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013633 break;
13634 case CAM_SENSOR_HDR_ZIGZAG:
13635 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13636 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13637 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013638 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013639 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013640 break;
13641 case CAM_SENSOR_HDR_STAGGERED:
13642 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13643 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13644 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013645 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013646 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013647 break;
13648 case CAM_SENSOR_HDR_OFF:
13649 isSupported = true;
13650 LOGD("Turning off sensor HDR");
13651 break;
13652 default:
13653 LOGE("HDR mode %d not supported", sensor_hdr);
13654 rc = BAD_VALUE;
13655 break;
13656 }
13657
13658 if(isSupported) {
13659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13660 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13661 rc = BAD_VALUE;
13662 } else {
13663 if(!isVideoHdrEnable)
13664 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013665 }
13666 }
13667 return rc;
13668}
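/*
 * Illustrative note (the numeric mapping is an assumption):
 * persist.camera.sensor.hdr is parsed directly into cam_sensor_hdr_type_t
 * when enable is true, so the property value selects the HDR flavour; the
 * _LE_CAMERA_ build defaults it to "3", which the comment above associates
 * with staggered HDR. A requested mode is only programmed when the matching
 * CAM_QCOM_FEATURE_* bit is present in qcom_supported_feature_mask; an
 * unsupported-but-known mode is silently skipped, and only an unknown value
 * returns BAD_VALUE.
 */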
13669
13670/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013671 * FUNCTION : needRotationReprocess
13672 *
13673 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13674 *
13675 * PARAMETERS : none
13676 *
13677 * RETURN : true: needed
13678 * false: no need
13679 *==========================================================================*/
13680bool QCamera3HardwareInterface::needRotationReprocess()
13681{
13682 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13683        // pp has the capability to process rotation; use reprocess for it
13684 LOGH("need do reprocess for rotation");
13685 return true;
13686 }
13687
13688 return false;
13689}
13690
13691/*===========================================================================
13692 * FUNCTION : needReprocess
13693 *
13694 * DESCRIPTION: if reprocess is needed
13695 *
13696 * PARAMETERS : none
13697 *
13698 * RETURN : true: needed
13699 * false: no need
13700 *==========================================================================*/
13701bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13702{
13703 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13704 // TODO: add for ZSL HDR later
13705 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13706 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13707 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13708 return true;
13709 } else {
13710 LOGH("already post processed frame");
13711 return false;
13712 }
13713 }
13714 return needRotationReprocess();
13715}
13716
13717/*===========================================================================
13718 * FUNCTION : needJpegExifRotation
13719 *
13720 * DESCRIPTION: if rotation from jpeg is needed
13721 *
13722 * PARAMETERS : none
13723 *
13724 * RETURN : true: needed
13725 * false: no need
13726 *==========================================================================*/
13727bool QCamera3HardwareInterface::needJpegExifRotation()
13728{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013729 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013730 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13731 LOGD("Need use Jpeg EXIF Rotation");
13732 return true;
13733 }
13734 return false;
13735}
13736
13737/*===========================================================================
13738 * FUNCTION : addOfflineReprocChannel
13739 *
13740 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13741 * coming from input channel
13742 *
13743 * PARAMETERS :
13744 * @config : reprocess configuration
13745 * @inputChHandle : pointer to the input (source) channel
13746 *
13747 *
13748 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13749 *==========================================================================*/
13750QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13751 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13752{
13753 int32_t rc = NO_ERROR;
13754 QCamera3ReprocessChannel *pChannel = NULL;
13755
13756 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013757 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13758 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013759 if (NULL == pChannel) {
13760 LOGE("no mem for reprocess channel");
13761 return NULL;
13762 }
13763
13764 rc = pChannel->initialize(IS_TYPE_NONE);
13765 if (rc != NO_ERROR) {
13766 LOGE("init reprocess channel failed, ret = %d", rc);
13767 delete pChannel;
13768 return NULL;
13769 }
13770
13771 // pp feature config
13772 cam_pp_feature_config_t pp_config;
13773 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13774
13775 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13776 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13777 & CAM_QCOM_FEATURE_DSDN) {
13778        //Use CPP CDS in case h/w supports it.
13779 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13780 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13781 }
13782 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13783 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13784 }
13785
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013786 if (config.hdr_param.hdr_enable) {
13787 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13788 pp_config.hdr_param = config.hdr_param;
13789 }
13790
13791 if (mForceHdrSnapshot) {
13792 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13793 pp_config.hdr_param.hdr_enable = 1;
13794 pp_config.hdr_param.hdr_need_1x = 0;
13795 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13796 }
13797
Thierry Strudel3d639192016-09-09 11:52:26 -070013798 rc = pChannel->addReprocStreamsFromSource(pp_config,
13799 config,
13800 IS_TYPE_NONE,
13801 mMetadataChannel);
13802
13803 if (rc != NO_ERROR) {
13804 delete pChannel;
13805 return NULL;
13806 }
13807 return pChannel;
13808}
13809
13810/*===========================================================================
13811 * FUNCTION : getMobicatMask
13812 *
13813 * DESCRIPTION: returns mobicat mask
13814 *
13815 * PARAMETERS : none
13816 *
13817 * RETURN : mobicat mask
13818 *
13819 *==========================================================================*/
13820uint8_t QCamera3HardwareInterface::getMobicatMask()
13821{
13822 return m_MobicatMask;
13823}
13824
13825/*===========================================================================
13826 * FUNCTION : setMobicat
13827 *
13828 * DESCRIPTION: set Mobicat on/off.
13829 *
13830 * PARAMETERS :
13831 * @params : none
13832 *
13833 * RETURN : int32_t type of status
13834 * NO_ERROR -- success
13835 * none-zero failure code
13836 *==========================================================================*/
13837int32_t QCamera3HardwareInterface::setMobicat()
13838{
Thierry Strudel3d639192016-09-09 11:52:26 -070013839 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013840
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013841 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013842 tune_cmd_t tune_cmd;
13843 tune_cmd.type = SET_RELOAD_CHROMATIX;
13844 tune_cmd.module = MODULE_ALL;
13845 tune_cmd.value = TRUE;
13846 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13847 CAM_INTF_PARM_SET_VFE_COMMAND,
13848 tune_cmd);
13849
13850 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13851 CAM_INTF_PARM_SET_PP_COMMAND,
13852 tune_cmd);
13853 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013854
13855 return ret;
13856}
13857
13858/*===========================================================================
13859* FUNCTION : getLogLevel
13860*
13861* DESCRIPTION: Reads the log level property into a variable
13862*
13863* PARAMETERS :
13864* None
13865*
13866* RETURN :
13867* None
13868*==========================================================================*/
13869void QCamera3HardwareInterface::getLogLevel()
13870{
13871 char prop[PROPERTY_VALUE_MAX];
13872 uint32_t globalLogLevel = 0;
13873
13874 property_get("persist.camera.hal.debug", prop, "0");
13875 int val = atoi(prop);
13876 if (0 <= val) {
13877 gCamHal3LogLevel = (uint32_t)val;
13878 }
13879
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013880 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013881 gKpiDebugLevel = atoi(prop);
13882
13883 property_get("persist.camera.global.debug", prop, "0");
13884 val = atoi(prop);
13885 if (0 <= val) {
13886 globalLogLevel = (uint32_t)val;
13887 }
13888
13889 /* Highest log level among hal.logs and global.logs is selected */
13890 if (gCamHal3LogLevel < globalLogLevel)
13891 gCamHal3LogLevel = globalLogLevel;
13892
13893 return;
13894}
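/*
 * Usage sketch (the level value shown is illustrative): HAL verbosity can be
 * raised at runtime without restarting the media server, e.g.
 *     adb shell setprop persist.camera.hal.debug 4
 *     adb shell dumpsys media.camera
 * dump() above re-reads the properties on every dumpsys call, and the
 * effective level is the higher of persist.camera.hal.debug and
 * persist.camera.global.debug.
 */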
13895
13896/*===========================================================================
13897 * FUNCTION : validateStreamRotations
13898 *
13899 * DESCRIPTION: Check if the rotations requested are supported
13900 *
13901 * PARAMETERS :
13902 * @stream_list : streams to be configured
13903 *
13904 * RETURN : NO_ERROR on success
13905 * -EINVAL on failure
13906 *
13907 *==========================================================================*/
13908int QCamera3HardwareInterface::validateStreamRotations(
13909 camera3_stream_configuration_t *streamList)
13910{
13911 int rc = NO_ERROR;
13912
13913 /*
13914 * Loop through all streams requested in configuration
13915 * Check if unsupported rotations have been requested on any of them
13916 */
13917 for (size_t j = 0; j < streamList->num_streams; j++){
13918 camera3_stream_t *newStream = streamList->streams[j];
13919
Emilian Peev35ceeed2017-06-29 11:58:56 -070013920 switch(newStream->rotation) {
13921 case CAMERA3_STREAM_ROTATION_0:
13922 case CAMERA3_STREAM_ROTATION_90:
13923 case CAMERA3_STREAM_ROTATION_180:
13924 case CAMERA3_STREAM_ROTATION_270:
13925 //Expected values
13926 break;
13927 default:
13928 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13929 "type:%d and stream format:%d", __func__,
13930 newStream->rotation, newStream->stream_type,
13931 newStream->format);
13932 return -EINVAL;
13933 }
13934
Thierry Strudel3d639192016-09-09 11:52:26 -070013935 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13936 bool isImplDef = (newStream->format ==
13937 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13938 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13939 isImplDef);
13940
13941 if (isRotated && (!isImplDef || isZsl)) {
13942 LOGE("Error: Unsupported rotation of %d requested for stream"
13943 "type:%d and stream format:%d",
13944 newStream->rotation, newStream->stream_type,
13945 newStream->format);
13946 rc = -EINVAL;
13947 break;
13948 }
13949 }
13950
13951 return rc;
13952}
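/*
 * Illustrative example (not from the original code): a rotation of
 * CAMERA3_STREAM_ROTATION_90 is accepted only on an
 * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED output stream; the same rotation on
 * a YUV/JPEG stream, or on a bidirectional (ZSL) implementation-defined
 * stream, fails with -EINVAL, as does any rotation value outside the
 * CAMERA3_STREAM_ROTATION_* enum. Rotation 0 is always accepted.
 */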
13953
13954/*===========================================================================
13955* FUNCTION : getFlashInfo
13956*
13957* DESCRIPTION: Retrieve information about whether the device has a flash.
13958*
13959* PARAMETERS :
13960* @cameraId : Camera id to query
13961* @hasFlash : Boolean indicating whether there is a flash device
13962* associated with given camera
13963* @flashNode : If a flash device exists, this will be its device node.
13964*
13965* RETURN :
13966* None
13967*==========================================================================*/
13968void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13969 bool& hasFlash,
13970 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13971{
13972 cam_capability_t* camCapability = gCamCapability[cameraId];
13973 if (NULL == camCapability) {
13974 hasFlash = false;
13975 flashNode[0] = '\0';
13976 } else {
13977 hasFlash = camCapability->flash_available;
13978 strlcpy(flashNode,
13979 (char*)camCapability->flash_dev_name,
13980 QCAMERA_MAX_FILEPATH_LENGTH);
13981 }
13982}
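/*
 * Usage sketch (illustrative): callers such as a torch/flash module can query
 * flash availability for a camera id before opening the device:
 *
 *     bool hasFlash = false;
 *     char flashNode[QCAMERA_MAX_FILEPATH_LENGTH];
 *     QCamera3HardwareInterface::getFlashInfo(cameraId, hasFlash, flashNode);
 *
 * If the capabilities for that id have not been loaded yet, hasFlash is set
 * to false and flashNode is left as an empty string.
 */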
13983
13984/*===========================================================================
13985* FUNCTION : getEepromVersionInfo
13986*
13987* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13988*
13989* PARAMETERS : None
13990*
13991* RETURN : string describing EEPROM version
13992* "\0" if no such info available
13993*==========================================================================*/
13994const char *QCamera3HardwareInterface::getEepromVersionInfo()
13995{
13996 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13997}
13998
13999/*===========================================================================
14000* FUNCTION : getLdafCalib
14001*
14002* DESCRIPTION: Retrieve Laser AF calibration data
14003*
14004* PARAMETERS : None
14005*
14006* RETURN : Two uint32_t describing laser AF calibration data
14007* NULL if none is available.
14008*==========================================================================*/
14009const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14010{
14011 if (mLdafCalibExist) {
14012 return &mLdafCalib[0];
14013 } else {
14014 return NULL;
14015 }
14016}
14017
14018/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014019* FUNCTION : getEaselFwVersion
14020*
14021* DESCRIPTION: Retrieve Easel firmware version
14022*
14023* PARAMETERS : None
14024*
14025* RETURN : string describing Firmware version
14026* "\0" if Easel manager client is not open
14027*==========================================================================*/
14028const char *QCamera3HardwareInterface::getEaselFwVersion()
14029{
14030 int rc = NO_ERROR;
14031
14032 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
14033 ALOGD("%s: Querying Easel firmware version", __FUNCTION__);
14034 if (EaselManagerClientOpened) {
14035 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
14036 if (rc != OK)
14037 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
14038 else
14039 return (const char *)&mEaselFwVersion[0];
14040 }
14041 return NULL;
14042}
14043
14044/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014045 * FUNCTION : dynamicUpdateMetaStreamInfo
14046 *
14047 * DESCRIPTION: This function:
14048 * (1) stops all the channels
14049 * (2) returns error on pending requests and buffers
14050 * (3) sends metastream_info in setparams
14051 * (4) starts all channels
14052 * This is useful when sensor has to be restarted to apply any
14053 * settings such as frame rate from a different sensor mode
14054 *
14055 * PARAMETERS : None
14056 *
14057 * RETURN : NO_ERROR on success
14058 * Error codes on failure
14059 *
14060 *==========================================================================*/
14061int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14062{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014063 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014064 int rc = NO_ERROR;
14065
14066 LOGD("E");
14067
14068 rc = stopAllChannels();
14069 if (rc < 0) {
14070 LOGE("stopAllChannels failed");
14071 return rc;
14072 }
14073
14074 rc = notifyErrorForPendingRequests();
14075 if (rc < 0) {
14076 LOGE("notifyErrorForPendingRequests failed");
14077 return rc;
14078 }
14079
14080 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14081 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14082 "Format:%d",
14083 mStreamConfigInfo.type[i],
14084 mStreamConfigInfo.stream_sizes[i].width,
14085 mStreamConfigInfo.stream_sizes[i].height,
14086 mStreamConfigInfo.postprocess_mask[i],
14087 mStreamConfigInfo.format[i]);
14088 }
14089
14090 /* Send meta stream info once again so that ISP can start */
14091 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14092 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14093 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14094 mParameters);
14095 if (rc < 0) {
14096 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14097 }
14098
14099 rc = startAllChannels();
14100 if (rc < 0) {
14101 LOGE("startAllChannels failed");
14102 return rc;
14103 }
14104
14105 LOGD("X");
14106 return rc;
14107}
14108
14109/*===========================================================================
14110 * FUNCTION : stopAllChannels
14111 *
14112 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14113 *
14114 * PARAMETERS : None
14115 *
14116 * RETURN : NO_ERROR on success
14117 * Error codes on failure
14118 *
14119 *==========================================================================*/
14120int32_t QCamera3HardwareInterface::stopAllChannels()
14121{
14122 int32_t rc = NO_ERROR;
14123
14124 LOGD("Stopping all channels");
14125 // Stop the Streams/Channels
14126 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14127 it != mStreamInfo.end(); it++) {
14128 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14129 if (channel) {
14130 channel->stop();
14131 }
14132 (*it)->status = INVALID;
14133 }
14134
14135 if (mSupportChannel) {
14136 mSupportChannel->stop();
14137 }
14138 if (mAnalysisChannel) {
14139 mAnalysisChannel->stop();
14140 }
14141 if (mRawDumpChannel) {
14142 mRawDumpChannel->stop();
14143 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014144 if (mHdrPlusRawSrcChannel) {
14145 mHdrPlusRawSrcChannel->stop();
14146 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014147 if (mMetadataChannel) {
14148 /* If content of mStreamInfo is not 0, there is metadata stream */
14149 mMetadataChannel->stop();
14150 }
14151
14152 LOGD("All channels stopped");
14153 return rc;
14154}
14155
14156/*===========================================================================
14157 * FUNCTION : startAllChannels
14158 *
14159 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14160 *
14161 * PARAMETERS : None
14162 *
14163 * RETURN : NO_ERROR on success
14164 * Error codes on failure
14165 *
14166 *==========================================================================*/
14167int32_t QCamera3HardwareInterface::startAllChannels()
14168{
14169 int32_t rc = NO_ERROR;
14170
14171 LOGD("Start all channels ");
14172 // Start the Streams/Channels
14173 if (mMetadataChannel) {
14174 /* If content of mStreamInfo is not 0, there is metadata stream */
14175 rc = mMetadataChannel->start();
14176 if (rc < 0) {
14177 LOGE("META channel start failed");
14178 return rc;
14179 }
14180 }
14181 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14182 it != mStreamInfo.end(); it++) {
14183 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14184 if (channel) {
14185 rc = channel->start();
14186 if (rc < 0) {
14187 LOGE("channel start failed");
14188 return rc;
14189 }
14190 }
14191 }
14192 if (mAnalysisChannel) {
14193 mAnalysisChannel->start();
14194 }
14195 if (mSupportChannel) {
14196 rc = mSupportChannel->start();
14197 if (rc < 0) {
14198 LOGE("Support channel start failed");
14199 return rc;
14200 }
14201 }
14202 if (mRawDumpChannel) {
14203 rc = mRawDumpChannel->start();
14204 if (rc < 0) {
14205 LOGE("RAW dump channel start failed");
14206 return rc;
14207 }
14208 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014209 if (mHdrPlusRawSrcChannel) {
14210 rc = mHdrPlusRawSrcChannel->start();
14211 if (rc < 0) {
14212 LOGE("HDR+ RAW channel start failed");
14213 return rc;
14214 }
14215 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014216
14217 LOGD("All channels started");
14218 return rc;
14219}
14220
14221/*===========================================================================
14222 * FUNCTION : notifyErrorForPendingRequests
14223 *
14224 * DESCRIPTION: This function sends error for all the pending requests/buffers
14225 *
14226 * PARAMETERS : None
14227 *
14228 * RETURN : Error codes
14229 * NO_ERROR on success
14230 *
14231 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    notifyErrorFoPendingDepthData(mDepthChannel);

    auto pendingRequest = mPendingRequestsList.begin();
    auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();

    // Iterate through pending requests (for which result metadata isn't sent yet) and pending
    // buffers (for which buffers aren't sent yet).
    while (pendingRequest != mPendingRequestsList.end() ||
            pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingRequest == mPendingRequestsList.end() ||
                pendingBuffer->frame_number < pendingRequest->frame_number) {
            // If metadata for this frame was sent, notify about a buffer error and return buffers
            // with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                // Send a buffer error for this frame number.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
        } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
                pendingBuffer->frame_number > pendingRequest->frame_number) {
            // If the buffers for this frame were sent already, notify about a result error.
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
            notify_msg.message.error.error_stream = nullptr;
            notify_msg.message.error.frame_number = pendingRequest->frame_number;
            orchestrateNotify(&notify_msg);

            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            mShutterDispatcher.clear(pendingRequest->frame_number);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        } else {
            // If both buffers and result metadata weren't sent yet, notify about a request error
            // and return buffers with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            mShutterDispatcher.clear(pendingRequest->frame_number);
            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();
    mShutterDispatcher.clear();
    mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mExpectedFrameDuration = 0;
    mExpectedInflightDuration = 0;
    LOGH("Cleared all the pending buffers ");

    return NO_ERROR;
}

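// Summary of the error-notification policy above (illustrative; it simply restates the
// three branches of notifyErrorForPendingRequests()):
//
//   result metadata already sent, buffers still pending -> CAMERA3_MSG_ERROR_BUFFER per
//       pending buffer, each buffer returned with CAMERA3_BUFFER_STATUS_ERROR
//   buffers already sent, result metadata still pending -> CAMERA3_MSG_ERROR_RESULT for
//       the frame (plus returning any input buffer)
//   neither sent yet                                    -> CAMERA3_MSG_ERROR_REQUEST per
//       pending buffer, buffers returned with error status
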
bool QCamera3HardwareInterface::isOnEncoder(
        const cam_dimension_t max_viewfinder_size,
        uint32_t width, uint32_t height)
{
    return ((width > (uint32_t)max_viewfinder_size.width) ||
            (height > (uint32_t)max_viewfinder_size.height) ||
            (width > (uint32_t)VIDEO_4K_WIDTH) ||
            (height > (uint32_t)VIDEO_4K_HEIGHT));
}

/*===========================================================================
 * FUNCTION   : setBundleInfo
 *
 * DESCRIPTION: Set bundle info for all streams that are bundled.
 *
 * PARAMETERS : None
 *
 * RETURN     : NO_ERROR on success
 *              Error codes on failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setBundleInfo()
{
    int32_t rc = NO_ERROR;

    if (mChannelHandle) {
        cam_bundle_config_t bundleInfo;
        memset(&bundleInfo, 0, sizeof(bundleInfo));
        rc = mCameraHandle->ops->get_bundle_info(
                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
        if (rc != NO_ERROR) {
            LOGE("get_bundle_info failed");
            return rc;
        }
        if (mAnalysisChannel) {
            mAnalysisChannel->setBundleInfo(bundleInfo);
        }
        if (mSupportChannel) {
            mSupportChannel->setBundleInfo(bundleInfo);
        }
        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            channel->setBundleInfo(bundleInfo);
        }
        if (mRawDumpChannel) {
            mRawDumpChannel->setBundleInfo(bundleInfo);
        }
        if (mHdrPlusRawSrcChannel) {
            mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : setInstantAEC
 *
 * DESCRIPTION: Set Instant AEC related params.
 *
 * PARAMETERS :
 *   @meta: CameraMetadata reference
 *
 * RETURN     : NO_ERROR on success
 *              Error codes on failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
{
    int32_t rc = NO_ERROR;
    uint8_t val = 0;
    char prop[PROPERTY_VALUE_MAX];

    // First try to configure instant AEC from framework metadata
    if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
        val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
    }

    // If framework did not set this value, try to read from set prop.
    if (val == 0) {
        memset(prop, 0, sizeof(prop));
        property_get("persist.camera.instant.aec", prop, "0");
        val = (uint8_t)atoi(prop);
    }

    if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
            (val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
        mInstantAEC = val;
        mInstantAECSettledFrameNumber = 0;
        mInstantAecFrameIdxCount = 0;
        LOGH("instantAEC value set %d", val);
        if (mInstantAEC) {
            memset(prop, 0, sizeof(prop));
            property_get("persist.camera.ae.instant.bound", prop, "10");
            int32_t aec_frame_skip_cnt = atoi(prop);
            if (aec_frame_skip_cnt >= 0) {
                mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
            } else {
                LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
                rc = BAD_VALUE;
            }
        }
    } else {
        LOGE("Bad instant aec value set %d", val);
        rc = BAD_VALUE;
    }
    return rc;
}

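// Illustrative ways to exercise instant AEC (a sketch, not exercised by this file; the
// numeric mode value below is an assumed example, and the valid range is checked against
// CAM_AEC_NORMAL_CONVERGENCE/CAM_AEC_CONVERGENCE_MAX above):
//
//   CameraMetadata settings;
//   int32_t instantAecMode = 1;                       // assumed fast-convergence mode
//   settings.update(QCAMERA3_INSTANT_AEC_MODE, &instantAecMode, 1);
//
// or, for bring-up/debugging, via the property consulted above:
//
//   adb shell setprop persist.camera.instant.aec 1
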
/*===========================================================================
 * FUNCTION   : get_num_overall_buffers
 *
 * DESCRIPTION: Estimate number of pending buffers across all requests.
 *
 * PARAMETERS : None
 *
 * RETURN     : Number of overall pending buffers
 *
 *==========================================================================*/
uint32_t PendingBuffersMap::get_num_overall_buffers()
{
    uint32_t sum_buffers = 0;
    for (auto &req : mPendingBuffersInRequest) {
        sum_buffers += req.mPendingBufferList.size();
    }
    return sum_buffers;
}

/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}

/*===========================================================================
 * FUNCTION   : getBufErrStatus
 *
 * DESCRIPTION: get buffer error status
 *
 * PARAMETERS : @buffer: buffer handle
 *
 * RETURN     : Error status
 *
 *==========================================================================*/
int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
{
    for (auto& req : mPendingBuffersInRequest) {
        for (auto& k : req.mPendingBufferList) {
            if (k.buffer == buffer)
                return k.bufStatus;
        }
    }
    return CAMERA3_BUFFER_STATUS_OK;
}

/*===========================================================================
 * FUNCTION   : setPAAFSupport
 *
 * DESCRIPTION: Set the preview-assisted auto focus support bit in
 *              feature mask according to stream type and filter
 *              arrangement
 *
 * PARAMETERS : @feature_mask: current feature mask, which may be modified
 *              @stream_type: stream type
 *              @filter_arrangement: filter arrangement
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::setPAAFSupport(
        cam_feature_mask_t& feature_mask,
        cam_stream_type_t stream_type,
        cam_color_filter_arrangement_t filter_arrangement)
{
    switch (filter_arrangement) {
    case CAM_FILTER_ARRANGEMENT_RGGB:
    case CAM_FILTER_ARRANGEMENT_GRBG:
    case CAM_FILTER_ARRANGEMENT_GBRG:
    case CAM_FILTER_ARRANGEMENT_BGGR:
        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
                (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
            if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
                feature_mask |= CAM_QCOM_FEATURE_PAAF;
        }
        break;
    case CAM_FILTER_ARRANGEMENT_Y:
        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
            feature_mask |= CAM_QCOM_FEATURE_PAAF;
        }
        break;
    default:
        break;
    }
    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
            feature_mask, stream_type, filter_arrangement);
}

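// Illustrative call site for setPAAFSupport() (sketch, not taken verbatim from this file):
// enable PAAF for a preview stream on a Bayer sensor before the stream's feature mask is
// handed to the channel. The capability field name is assumed for illustration.
//
//   cam_feature_mask_t featureMask = 0;
//   setPAAFSupport(featureMask, CAM_STREAM_TYPE_PREVIEW,
//           gCamCapability[mCameraId]->color_arrangement);   // field name assumed
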
/*===========================================================================
* FUNCTION   : getSensorMountAngle
*
* DESCRIPTION: Retrieve sensor mount angle
*
* PARAMETERS : None
*
* RETURN     : sensor mount angle in uint32_t
*==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    return gCamCapability[mCameraId]->sensor_mount_angle;
}

/*===========================================================================
* FUNCTION   : getRelatedCalibrationData
*
* DESCRIPTION: Retrieve related system calibration data
*
* PARAMETERS : None
*
* RETURN     : Pointer of related system calibration data
*==========================================================================*/
const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
{
    return (const cam_related_system_calibration_data_t *)
            &(gCamCapability[mCameraId]->related_cam_calibration);
}

/*===========================================================================
 * FUNCTION   : is60HzZone
 *
 * DESCRIPTION: Whether the phone is in a zone with 60Hz electricity frequency
 *
 * PARAMETERS : None
 *
 * RETURN     : True if in 60Hz zone, False otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::is60HzZone()
{
    time_t t = time(NULL);
    struct tm lt;

    struct tm* r = localtime_r(&t, &lt);

    if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
        return true;
    else
        return false;
}

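// Note on the heuristic above: tm_gmtoff is the local UTC offset in seconds, so the check
// treats offsets at or below UTC-2 and at or above UTC+8 (or a failed localtime_r) as 60Hz
// zones and everything in between as 50Hz. It is only a coarse timezone-based default for
// anti-banding, not an exact mains-frequency lookup; for example, UTC-5 maps to 60Hz and
// UTC+1 maps to 50Hz.
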
/*===========================================================================
 * FUNCTION   : adjustBlackLevelForCFA
 *
 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
 *              of Bayer CFA (Color Filter Array).
 *
 * PARAMETERS : @input: black level pattern in the order of RGGB
 *              @output: black level pattern in the order of CFA
 *              @color_arrangement: CFA color arrangement
 *
 * RETURN     : None
 *==========================================================================*/
template<typename T>
void QCamera3HardwareInterface::adjustBlackLevelForCFA(
        T input[BLACK_LEVEL_PATTERN_CNT],
        T output[BLACK_LEVEL_PATTERN_CNT],
        cam_color_filter_arrangement_t color_arrangement)
{
    switch (color_arrangement) {
    case CAM_FILTER_ARRANGEMENT_GRBG:
        output[0] = input[1];
        output[1] = input[0];
        output[2] = input[3];
        output[3] = input[2];
        break;
    case CAM_FILTER_ARRANGEMENT_GBRG:
        output[0] = input[2];
        output[1] = input[3];
        output[2] = input[0];
        output[3] = input[1];
        break;
    case CAM_FILTER_ARRANGEMENT_BGGR:
        output[0] = input[3];
        output[1] = input[2];
        output[2] = input[1];
        output[3] = input[0];
        break;
    case CAM_FILTER_ARRANGEMENT_RGGB:
        output[0] = input[0];
        output[1] = input[1];
        output[2] = input[2];
        output[3] = input[3];
        break;
    default:
        LOGE("Invalid color arrangement to derive dynamic blacklevel");
        break;
    }
}

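// Illustrative use of adjustBlackLevelForCFA() (sketch): remap an RGGB-ordered black level
// pattern into the sensor's CFA order before publishing it. The capability field name is
// assumed for illustration.
//
//   float rggbBlackLevel[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 64.0f, 64.0f, 64.0f};
//   float cfaBlackLevel[BLACK_LEVEL_PATTERN_CNT];
//   adjustBlackLevelForCFA(rggbBlackLevel, cfaBlackLevel,
//           gCamCapability[mCameraId]->color_arrangement);   // field name assumed
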
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
        CameraMetadata &resultMetadata,
        std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    }
}

bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
        const camera3_capture_request_t &request, const CameraMetadata &metadata) {
    if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
            metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
        ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
                metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
        return false;
    }

    if (!metadata.exists(ANDROID_EDGE_MODE) ||
            metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
        ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
            metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
        ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
            (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
            metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
            ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
        ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
        return false;
    }

    if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
            metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
        ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
        return false;
    }

    if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
            metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
            ANDROID_CONTROL_EFFECT_MODE_OFF) {
        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
        return false;
    }

    if (!metadata.exists(ANDROID_CONTROL_MODE) ||
            (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
            metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
            ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
        ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
        return false;
    }

    // TODO (b/32585046): support non-ZSL.
    if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
            metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
        ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
        return false;
    }

    // TODO (b/32586081): support flash.
    if (!metadata.exists(ANDROID_FLASH_MODE) ||
            metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
        ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
        return false;
    }

    // TODO (b/36492953): support digital zoom.
    if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
            metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
            metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
            metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
            gCamCapability[mCameraId]->active_array_size.width ||
            metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
            gCamCapability[mCameraId]->active_array_size.height) {
        ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
                __FUNCTION__);
        return false;
    }

    if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
            metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
        ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    // TODO (b/36693254, b/36690506): support other outputs.
    if (request.num_output_buffers != 1 ||
            request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
            ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
        }
        return false;
    }

    return true;
}

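// For reference, a capture request that passes the checks above would carry settings along
// these lines (illustrative sketch using framework metadata tags only; it is not invoked
// anywhere in this file):
//
//   CameraMetadata settings;
//   uint8_t nr    = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
//   uint8_t edge  = ANDROID_EDGE_MODE_HIGH_QUALITY;
//   uint8_t zsl   = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
//   uint8_t flash = ANDROID_FLASH_MODE_OFF;
//   settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
//   settings.update(ANDROID_EDGE_MODE, &edge, 1);
//   settings.update(ANDROID_CONTROL_ENABLE_ZSL, &zsl, 1);
//   settings.update(ANDROID_FLASH_MODE, &flash, 1);
//   // ...plus auto AE/AWB/control modes, effects off, HQ tonemap and aberration
//   // correction, a full active-array crop region, and exactly one BLOB (JPEG)
//   // output buffer in the camera3_capture_request_t.
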
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
        const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;
    if (!isRequestHdrPlusCompatible(request, metadata)) return false;

    // Get a YUV buffer from pic channel.
    QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
    auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
    status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
    if (res != OK) {
        ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return false;
    }

    pbcamera::StreamBuffer buffer;
    buffer.streamId = kPbYuvOutputStreamId;
    buffer.dmaBufFd = yuvBuffer->fd;
    buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
    buffer.dataSize = yuvBuffer->frame_len;

    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    pbRequest.outputBuffers.push_back(buffer);

    // Submit an HDR+ capture request to HDR+ service.
    res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        return false;
    }

    hdrPlusRequest->yuvBuffer = yuvBuffer;
    hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);

    return true;
}

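// Illustrative caller flow (sketch; the "Locked" suffix is taken to mean the relevant HAL
// locks are already held by the caller, and the variable names below are assumptions):
//
//   HdrPlusPendingRequest pendingHdrPlusRequest;
//   if (trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta)) {
//       Mutex::Autolock l(mHdrPlusPendingRequestsLock);
//       mHdrPlusPendingRequests.emplace(request->frame_number, pendingHdrPlusRequest);
//   } else {
//       // Fall back to a regular (non-HDR+) capture request.
//   }
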
status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
{
    if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
        return OK;
    }

    status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
    if (res != OK) {
        ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    gHdrPlusClientOpening = true;

    return OK;
}

status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
{
    status_t res;

    if (mHdrPlusModeEnabled) {
        return OK;
    }

    // Check if gHdrPlusClient is opened or being opened.
    if (gHdrPlusClient == nullptr) {
        if (gHdrPlusClientOpening) {
            // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
            return OK;
        }

        res = openHdrPlusClientAsyncLocked();
        if (res != OK) {
            ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // When opening HDR+ client completes, HDR+ mode will be enabled.
        return OK;
    }

    // Configure stream for HDR+.
    res = configureHdrPlusStreamsLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
    res = gHdrPlusClient->setZslHdrPlusMode(true);
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    mHdrPlusModeEnabled = true;
    ALOGD("%s: HDR+ mode enabled", __FUNCTION__);

    return OK;
}

void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
{
    if (gHdrPlusClientOpening) {
        gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
    }
}

void QCamera3HardwareInterface::disableHdrPlusModeLocked()
{
    // Disable HDR+ mode.
    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
        status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
        if (res != OK) {
            ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        // Close HDR+ client so Easel can enter low power mode.
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
    }

    mHdrPlusModeEnabled = false;
    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
}

bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
{
    // Check if mPictureChannel is valid.
    // TODO: Support YUV (b/36693254) and RAW (b/36690506)
    if (mPictureChannel == nullptr) {
        return false;
    }

    return true;
}

status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
{
    pbcamera::InputConfiguration inputConfig;
    std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
    status_t res = OK;

    // Configure HDR+ client streams.
    // Get input config.
    if (mHdrPlusRawSrcChannel) {
        // HDR+ input buffers will be provided by HAL.
        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        inputConfig.isSensorInput = false;
    } else {
        // Sensor MIPI will send data to Easel.
        inputConfig.isSensorInput = true;
        inputConfig.sensorMode.cameraId = mCameraId;
        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
        if (mSensorModeInfo.num_raw_bits != 10) {
            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
                    mSensorModeInfo.num_raw_bits);
            return BAD_VALUE;
        }

        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
    }

    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV output buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);

            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
{
    ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    std::unique_lock<std::mutex> l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;
    gHdrPlusClientOpenCond.notify_one();

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
    }
}

void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    std::unique_lock<std::mutex> l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
    gHdrPlusClientOpenCond.notify_one();
}

void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
{
    ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
            __LINE__, requestId, apSensorTimestampNs);

    mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
}

void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
{
    pthread_mutex_lock(&mMutex);

    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
        pthread_mutex_unlock(&mMutex);
        return;
    }

    requestIter->partial_result_cnt++;

    CameraMetadata metadata;
    uint8_t ready = true;
    metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);

    // Send it to framework.
    camera3_capture_result_t result = {};

    result.result = metadata.getAndLock();
    // Populate metadata result
    result.frame_number = requestId;
    result.num_output_buffers = 0;
    result.output_buffers = NULL;
    result.partial_result = requestIter->partial_result_cnt;

    orchestrateResult(&result);
    metadata.unlock(result.result);

    pthread_mutex_unlock(&mMutex);
}

void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // TODO (b/34854987): initiate this from HDR+ service.
        onNextCaptureReady(result->requestId);

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);

        // Return the buffer to pic channel.
        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
        picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

        mHdrPlusPendingRequests.erase(pendingRequest);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send out shutters until one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

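// Ordering guarantee (illustrative): shutters are delivered to the framework strictly in
// frame-number order within each map, because dispatch stops at the first unready entry.
//
//   expectShutter(10, /*isReprocess*/false);
//   expectShutter(11, /*isReprocess*/false);
//   expectShutter(12, /*isReprocess*/false);
//   markShutterReady(11, t11);   // nothing is sent, frame 10 is still pending
//   markShutterReady(10, t10);   // shutters for 10 and 11 are sent
//   markShutterReady(12, t12);   // shutter for 12 is sent
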
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send out buffers until one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result with the ready buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

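// Analogous to ShutterDispatcher: buffers are returned per stream in frame-number order, so
// a later frame's ready buffer is held back until every earlier frame on the same stream has
// been marked ready (illustrative):
//
//   expectBuffer(10, stream);
//   expectBuffer(11, stream);
//   markBufferReady(11, buf11);   // held back, frame 10 on this stream is still pending
//   markBufferReady(10, buf10);   // results for frames 10 and 11 are sent
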
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera