/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
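// Illustrative note: DATA_PTR(m_pDualCamCmdHeap, 0) expands to
// m_pDualCamCmdHeap->getPtr(0), i.e. the mapped pointer of buffer 0 in that heap
// (this is how the dual-camera command buffer is fetched in openCamera() below).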

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 10
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
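// Illustrative note: METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to the number
// of entries in that lookup table; the sizeof idiom only works on true arrays,
// not on pointers.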

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
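// Illustrative example of the ordering rule above: CAM_AWB_D50 is paired with
// ILLUMINANT1_D50, ILLUMINANT1_DAYLIGHT and ILLUMINANT1_FINE_WEATHER, so a
// HAL-to-Android lookup that scans from index 0 reports ILLUMINANT1_D50.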

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
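// These are the camera3_device_ops_t entry points the framework invokes through
// mCameraDevice.ops (wired up in the constructor below); the NULL entries are
// optional/legacy hooks left unimplemented here.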

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
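// Illustrative usage (only logs when gEaselProfilingEnabled is set):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// emits "[EASEL_STARTUP_LATENCY] Camera Open at <boot time in ms>"; see openCamera().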

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = property_get_bool("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the streams requested in the configuration are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against the
         * appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                        (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                        mPDSupported) {
                    if ((depthWidth == newStream->width) &&
                            (depthHeight == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                        mPDSupported) {
                    //As per spec, depth cloud should be sample count / 16
                    uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                    if ((depthSamplesCount == newStream->width) &&
                            (1 == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
                    /* We could potentially break here to enforce that a ZSL stream
                     * set from the framework is always full active array size,
                     * but it is not clear from the spec if the framework will always
                     * follow that; also we have logic to override to full array
                     * size, so keeping the logic lenient at the moment.
                     */
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                        MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1325
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001326/*===========================================================================
1327 * FUNCTION : validateUsageFlags
1328 *
1329 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1330 *
1331 * PARAMETERS :
1332 * @stream_list : streams to be configured
1333 *
1334 * RETURN :
1335 * NO_ERROR if the usage flags are supported
1336 * error code if usage flags are not supported
1337 *
1338 *==========================================================================*/
1339int QCamera3HardwareInterface::validateUsageFlags(
1340 const camera3_stream_configuration_t* streamList)
1341{
1342 for (size_t j = 0; j < streamList->num_streams; j++) {
1343 const camera3_stream_t *newStream = streamList->streams[j];
1344
1345 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1346 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1347 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1348 continue;
1349 }
1350
Jason Leec4cf5032017-05-24 18:31:41 -07001351 // Here we only care whether it's EIS3 or not
1352 char is_type_value[PROPERTY_VALUE_MAX];
1353 property_get("persist.camera.is_type", is_type_value, "4");
1354 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1355 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1356 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1357 isType = IS_TYPE_NONE;
1358
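        // The resolved isType is passed to the getStreamDefaultFormat() calls below
        // so that default format selection accounts for the EIS mode in use.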
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001359 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1360 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1361 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1362 bool forcePreviewUBWC = true;
1363 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1364 forcePreviewUBWC = false;
1365 }
1366 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001367 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001369 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001371 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001372
1373 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1374 // So color spaces will always match.
1375
1376 // Check whether underlying formats of shared streams match.
1377 if (isVideo && isPreview && videoFormat != previewFormat) {
1378 LOGE("Combined video and preview usage flag is not supported");
1379 return -EINVAL;
1380 }
1381 if (isPreview && isZSL && previewFormat != zslFormat) {
1382 LOGE("Combined preview and zsl usage flag is not supported");
1383 return -EINVAL;
1384 }
1385 if (isVideo && isZSL && videoFormat != zslFormat) {
1386 LOGE("Combined video and zsl usage flag is not supported");
1387 return -EINVAL;
1388 }
1389 }
1390 return NO_ERROR;
1391}
1392
1393/*===========================================================================
1394 * FUNCTION : validateUsageFlagsForEis
1395 *
1396 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1397 *
1398 * PARAMETERS :
1399 * @stream_list : streams to be configured
1400 *
1401 * RETURN :
1402 * NO_ERROR if the usage flags are supported
1403 * error code if usage flags are not supported
1404 *
1405 *==========================================================================*/
1406int QCamera3HardwareInterface::validateUsageFlagsForEis(
1407 const camera3_stream_configuration_t* streamList)
1408{
1409 for (size_t j = 0; j < streamList->num_streams; j++) {
1410 const camera3_stream_t *newStream = streamList->streams[j];
1411
1412 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1413 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1414
1415 // Because EIS is "hard-coded" for certain use cases, and the current
1416 // implementation doesn't support sharing preview and video on the same
1417 // stream, return failure if EIS is forced on.
1418 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1419 LOGE("Combined video and preview usage flag is not supported due to EIS");
1420 return -EINVAL;
1421 }
1422 }
1423 return NO_ERROR;
1424}
1425
Thierry Strudel3d639192016-09-09 11:52:26 -07001426/*==============================================================================
1427 * FUNCTION : isSupportChannelNeeded
1428 *
1429 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1430 *
1431 * PARAMETERS :
1432 * @stream_list : streams to be configured
1433 * @stream_config_info : the config info for streams to be configured
1434 *
1435 * RETURN : Boolean true/false decision
1436 *
1437 *==========================================================================*/
1438bool QCamera3HardwareInterface::isSupportChannelNeeded(
1439 camera3_stream_configuration_t *streamList,
1440 cam_stream_size_info_t stream_config_info)
1441{
1442 uint32_t i;
1443 bool pprocRequested = false;
1444 /* Check for conditions where PProc pipeline does not have any streams */
1445 for (i = 0; i < stream_config_info.num_streams; i++) {
1446 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1447 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1448 pprocRequested = true;
1449 break;
1450 }
1451 }
1452
1453 if (pprocRequested == false )
1454 return true;
1455
1456 /* Dummy stream needed if only raw or jpeg streams present */
1457 for (i = 0; i < streamList->num_streams; i++) {
1458 switch(streamList->streams[i]->format) {
1459 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1460 case HAL_PIXEL_FORMAT_RAW10:
1461 case HAL_PIXEL_FORMAT_RAW16:
1462 case HAL_PIXEL_FORMAT_BLOB:
1463 break;
1464 default:
1465 return false;
1466 }
1467 }
1468 return true;
1469}
1470
1471/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001474 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001475 *
1476 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001477 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001478 *
1479 * RETURN : int32_t type of status
1480 * NO_ERROR -- success
1481 * non-zero failure code
1482 *
1483 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001484int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001485{
1486 int32_t rc = NO_ERROR;
1487
1488 cam_dimension_t max_dim = {0, 0};
1489 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1490 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1491 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1492 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1493 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1494 }
1495
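    // Publish the largest width/height across all configured streams before querying,
    // so the sensor mode info returned below corresponds to the current stream configuration.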
1496 clear_metadata_buffer(mParameters);
1497
1498 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1499 max_dim);
1500 if (rc != NO_ERROR) {
1501 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1502 return rc;
1503 }
1504
1505 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1506 if (rc != NO_ERROR) {
1507 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1508 return rc;
1509 }
1510
1511 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001512 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001513
1514 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1515 mParameters);
1516 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001517 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001518 return rc;
1519 }
1520
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001521 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001522 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1523 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1524 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1525 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1526 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001527
1528 return rc;
1529}
1530
1531/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001532 * FUNCTION : getCurrentSensorModeInfo
1533 *
1534 * DESCRIPTION: Get sensor mode information that is currently selected.
1535 *
1536 * PARAMETERS :
1537 * @sensorModeInfo : sensor mode information (output)
1538 *
1539 * RETURN : int32_t type of status
1540 * NO_ERROR -- success
1541 * non-zero failure code
1542 *
1543 *==========================================================================*/
1544int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1545{
1546 int32_t rc = NO_ERROR;
1547
1548 clear_metadata_buffer(mParameters);
1549 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1550
1551 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1552 mParameters);
1553 if (rc != NO_ERROR) {
1554 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1555 return rc;
1556 }
1557
1558 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1559 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1560 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1561 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1562 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1563 sensorModeInfo.num_raw_bits);
1564
1565 return rc;
1566}
1567
1568/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001569 * FUNCTION : addToPPFeatureMask
1570 *
1571 * DESCRIPTION: add additional features to pp feature mask based on
1572 * stream type and usecase
1573 *
1574 * PARAMETERS :
1575 * @stream_format : stream type for feature mask
1576 * @stream_idx : stream idx within postprocess_mask list to change
1577 *
1578 * RETURN : NULL
1579 *
1580 *==========================================================================*/
1581void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1582 uint32_t stream_idx)
1583{
1584 char feature_mask_value[PROPERTY_VALUE_MAX];
1585 cam_feature_mask_t feature_mask;
1586 int args_converted;
1587 int property_len;
1588
1589 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001590#ifdef _LE_CAMERA_
1591 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1592 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1593 property_len = property_get("persist.camera.hal3.feature",
1594 feature_mask_value, swtnr_feature_mask_value);
1595#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001596 property_len = property_get("persist.camera.hal3.feature",
1597 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001598#endif
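    // The feature mask property may be specified in hex (0x-prefixed) or decimal.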
Thierry Strudel3d639192016-09-09 11:52:26 -07001599 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1600 (feature_mask_value[1] == 'x')) {
1601 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1602 } else {
1603 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1604 }
1605 if (1 != args_converted) {
1606 feature_mask = 0;
1607 LOGE("Wrong feature mask %s", feature_mask_value);
1608 return;
1609 }
1610
1611 switch (stream_format) {
1612 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1613 /* Add LLVD to pp feature mask only if video hint is enabled */
1614 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1615 mStreamConfigInfo.postprocess_mask[stream_idx]
1616 |= CAM_QTI_FEATURE_SW_TNR;
1617 LOGH("Added SW TNR to pp feature mask");
1618 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1619 mStreamConfigInfo.postprocess_mask[stream_idx]
1620 |= CAM_QCOM_FEATURE_LLVD;
1621 LOGH("Added LLVD SeeMore to pp feature mask");
1622 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001623 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1624 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1625 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1626 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001627 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1628 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1629 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1630 CAM_QTI_FEATURE_BINNING_CORRECTION;
1631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001632 break;
1633 }
1634 default:
1635 break;
1636 }
1637 LOGD("PP feature mask %llx",
1638 mStreamConfigInfo.postprocess_mask[stream_idx]);
1639}
1640
1641/*==============================================================================
1642 * FUNCTION : updateFpsInPreviewBuffer
1643 *
1644 * DESCRIPTION: update FPS information in preview buffer.
1645 *
1646 * PARAMETERS :
1647 * @metadata : pointer to metadata buffer
1648 * @frame_number: frame_number to look for in pending buffer list
1649 *
1650 * RETURN : None
1651 *
1652 *==========================================================================*/
1653void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1654 uint32_t frame_number)
1655{
1656 // Mark all pending buffers for this particular request
1657 // with corresponding framerate information
1658 for (List<PendingBuffersInRequest>::iterator req =
1659 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1660 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1661 for(List<PendingBufferInfo>::iterator j =
1662 req->mPendingBufferList.begin();
1663 j != req->mPendingBufferList.end(); j++) {
1664 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1665 if ((req->frame_number == frame_number) &&
1666 (channel->getStreamTypeMask() &
1667 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1668 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1669 CAM_INTF_PARM_FPS_RANGE, metadata) {
1670 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1671 struct private_handle_t *priv_handle =
1672 (struct private_handle_t *)(*(j->buffer));
1673 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1674 }
1675 }
1676 }
1677 }
1678}
1679
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001680/*==============================================================================
1681 * FUNCTION : updateTimeStampInPendingBuffers
1682 *
1683 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1684 * of a frame number
1685 *
1686 * PARAMETERS :
1687 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1688 * @timestamp : timestamp to be set
1689 *
1690 * RETURN : None
1691 *
1692 *==========================================================================*/
1693void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1694 uint32_t frameNumber, nsecs_t timestamp)
1695{
1696 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1697 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1698 if (req->frame_number != frameNumber)
1699 continue;
1700
1701 for (auto k = req->mPendingBufferList.begin();
1702 k != req->mPendingBufferList.end(); k++ ) {
1703 struct private_handle_t *priv_handle =
1704 (struct private_handle_t *) (*(k->buffer));
1705 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1706 }
1707 }
1708 return;
1709}
1710
Thierry Strudel3d639192016-09-09 11:52:26 -07001711/*===========================================================================
1712 * FUNCTION : configureStreams
1713 *
1714 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1715 * and output streams.
1716 *
1717 * PARAMETERS :
1718 * @stream_list : streams to be configured
1719 *
1720 * RETURN :
1721 *
1722 *==========================================================================*/
1723int QCamera3HardwareInterface::configureStreams(
1724 camera3_stream_configuration_t *streamList)
1725{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001726 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001727 int rc = 0;
1728
1729 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001730 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001731 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001732 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001733
1734 return rc;
1735}
1736
1737/*===========================================================================
1738 * FUNCTION : configureStreamsPerfLocked
1739 *
1740 * DESCRIPTION: configureStreams while perfLock is held.
1741 *
1742 * PARAMETERS :
1743 * @stream_list : streams to be configured
1744 *
1745 * RETURN : int32_t type of status
1746 * NO_ERROR -- success
1747 * non-zero failure code
1748 *==========================================================================*/
1749int QCamera3HardwareInterface::configureStreamsPerfLocked(
1750 camera3_stream_configuration_t *streamList)
1751{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001752 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001753 int rc = 0;
1754
1755 // Sanity check stream_list
1756 if (streamList == NULL) {
1757 LOGE("NULL stream configuration");
1758 return BAD_VALUE;
1759 }
1760 if (streamList->streams == NULL) {
1761 LOGE("NULL stream list");
1762 return BAD_VALUE;
1763 }
1764
1765 if (streamList->num_streams < 1) {
1766 LOGE("Bad number of streams requested: %d",
1767 streamList->num_streams);
1768 return BAD_VALUE;
1769 }
1770
1771 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1772 LOGE("Maximum number of streams %d exceeded: %d",
1773 MAX_NUM_STREAMS, streamList->num_streams);
1774 return BAD_VALUE;
1775 }
1776
Jason Leec4cf5032017-05-24 18:31:41 -07001777 mOpMode = streamList->operation_mode;
1778 LOGD("mOpMode: %d", mOpMode);
1779
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001780 rc = validateUsageFlags(streamList);
1781 if (rc != NO_ERROR) {
1782 return rc;
1783 }
1784
Thierry Strudel3d639192016-09-09 11:52:26 -07001785 /* first invalidate all the streams in mStreamInfo;
1786 * if they appear again, they will be validated */
1787 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1788 it != mStreamInfo.end(); it++) {
1789 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1790 if (channel) {
1791 channel->stop();
1792 }
1793 (*it)->status = INVALID;
1794 }
1795
1796 if (mRawDumpChannel) {
1797 mRawDumpChannel->stop();
1798 delete mRawDumpChannel;
1799 mRawDumpChannel = NULL;
1800 }
1801
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001802 if (mHdrPlusRawSrcChannel) {
1803 mHdrPlusRawSrcChannel->stop();
1804 delete mHdrPlusRawSrcChannel;
1805 mHdrPlusRawSrcChannel = NULL;
1806 }
1807
Thierry Strudel3d639192016-09-09 11:52:26 -07001808 if (mSupportChannel)
1809 mSupportChannel->stop();
1810
1811 if (mAnalysisChannel) {
1812 mAnalysisChannel->stop();
1813 }
1814 if (mMetadataChannel) {
1815 /* If mStreamInfo is not empty, there is a metadata stream */
1816 mMetadataChannel->stop();
1817 }
1818 if (mChannelHandle) {
1819 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001820 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001821 LOGD("stopping channel %d", mChannelHandle);
1822 }
1823
1824 pthread_mutex_lock(&mMutex);
1825
1826 // Check state
1827 switch (mState) {
1828 case INITIALIZED:
1829 case CONFIGURED:
1830 case STARTED:
1831 /* valid state */
1832 break;
1833 default:
1834 LOGE("Invalid state %d", mState);
1835 pthread_mutex_unlock(&mMutex);
1836 return -ENODEV;
1837 }
1838
1839 /* Check whether we have video stream */
1840 m_bIs4KVideo = false;
1841 m_bIsVideo = false;
1842 m_bEisSupportedSize = false;
1843 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001844 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001846 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001847 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001848 uint32_t videoWidth = 0U;
1849 uint32_t videoHeight = 0U;
1850 size_t rawStreamCnt = 0;
1851 size_t stallStreamCnt = 0;
1852 size_t processedStreamCnt = 0;
1853 // Number of streams on ISP encoder path
1854 size_t numStreamsOnEncoder = 0;
1855 size_t numYuv888OnEncoder = 0;
1856 bool bYuv888OverrideJpeg = false;
1857 cam_dimension_t largeYuv888Size = {0, 0};
1858 cam_dimension_t maxViewfinderSize = {0, 0};
1859 bool bJpegExceeds4K = false;
1860 bool bJpegOnEncoder = false;
1861 bool bUseCommonFeatureMask = false;
1862 cam_feature_mask_t commonFeatureMask = 0;
1863 bool bSmallJpegSize = false;
1864 uint32_t width_ratio;
1865 uint32_t height_ratio;
1866 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1867 camera3_stream_t *inputStream = NULL;
1868 bool isJpeg = false;
1869 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001870 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001871 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001872
1873 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1874
1875 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001876 uint8_t eis_prop_set;
1877 uint32_t maxEisWidth = 0;
1878 uint32_t maxEisHeight = 0;
1879
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001880 // Initialize all instant AEC related variables
1881 mInstantAEC = false;
1882 mResetInstantAEC = false;
1883 mInstantAECSettledFrameNumber = 0;
1884 mAecSkipDisplayFrameBound = 0;
1885 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001886 mCurrFeatureState = 0;
1887 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001888
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1890
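    // EIS is considered supported when the capability table advertises either EIS 2.0 or EIS 3.0.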
1891 size_t count = IS_TYPE_MAX;
1892 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1893 for (size_t i = 0; i < count; i++) {
1894 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001895 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1896 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001897 break;
1898 }
1899 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001900
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001901 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001902 maxEisWidth = MAX_EIS_WIDTH;
1903 maxEisHeight = MAX_EIS_HEIGHT;
1904 }
1905
1906 /* EIS setprop control */
1907 char eis_prop[PROPERTY_VALUE_MAX];
1908 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001909 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 eis_prop_set = (uint8_t)atoi(eis_prop);
1911
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001912 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001913 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1914
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001915 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1916 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001917
Thierry Strudel3d639192016-09-09 11:52:26 -07001918 /* stream configurations */
1919 for (size_t i = 0; i < streamList->num_streams; i++) {
1920 camera3_stream_t *newStream = streamList->streams[i];
1921 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1922 "height = %d, rotation = %d, usage = 0x%x",
1923 i, newStream->stream_type, newStream->format,
1924 newStream->width, newStream->height, newStream->rotation,
1925 newStream->usage);
1926 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1927 newStream->stream_type == CAMERA3_STREAM_INPUT){
1928 isZsl = true;
1929 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001930 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1931 IS_USAGE_PREVIEW(newStream->usage)) {
1932 isPreview = true;
1933 }
1934
Thierry Strudel3d639192016-09-09 11:52:26 -07001935 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1936 inputStream = newStream;
1937 }
1938
Emilian Peev7650c122017-01-19 08:24:33 -08001939 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1940 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001941 isJpeg = true;
1942 jpegSize.width = newStream->width;
1943 jpegSize.height = newStream->height;
1944 if (newStream->width > VIDEO_4K_WIDTH ||
1945 newStream->height > VIDEO_4K_HEIGHT)
1946 bJpegExceeds4K = true;
1947 }
1948
1949 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1950 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1951 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001952 // In HAL3 we can have multiple different video streams.
1953 // The variables video width and height are used below as
1954 // dimensions of the biggest of them
1955 if (videoWidth < newStream->width ||
1956 videoHeight < newStream->height) {
1957 videoWidth = newStream->width;
1958 videoHeight = newStream->height;
1959 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001960 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1961 (VIDEO_4K_HEIGHT <= newStream->height)) {
1962 m_bIs4KVideo = true;
1963 }
1964 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1965 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966
Thierry Strudel3d639192016-09-09 11:52:26 -07001967 }
1968 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1969 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1970 switch (newStream->format) {
1971 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001972 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1973 depthPresent = true;
1974 break;
1975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001976 stallStreamCnt++;
1977 if (isOnEncoder(maxViewfinderSize, newStream->width,
1978 newStream->height)) {
1979 numStreamsOnEncoder++;
1980 bJpegOnEncoder = true;
1981 }
1982 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1983 newStream->width);
1984 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1985 newStream->height);
1986 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1987 "FATAL: max_downscale_factor cannot be zero and so assert");
1988 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1989 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1990 LOGH("Setting small jpeg size flag to true");
1991 bSmallJpegSize = true;
1992 }
1993 break;
1994 case HAL_PIXEL_FORMAT_RAW10:
1995 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1996 case HAL_PIXEL_FORMAT_RAW16:
1997 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001998 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1999 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2000 pdStatCount++;
2001 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002002 break;
2003 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2004 processedStreamCnt++;
2005 if (isOnEncoder(maxViewfinderSize, newStream->width,
2006 newStream->height)) {
2007 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2008 !IS_USAGE_ZSL(newStream->usage)) {
2009 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2010 }
2011 numStreamsOnEncoder++;
2012 }
2013 break;
2014 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2015 processedStreamCnt++;
2016 if (isOnEncoder(maxViewfinderSize, newStream->width,
2017 newStream->height)) {
2018 // If Yuv888 size is not greater than 4K, set feature mask
2019 // to SUPERSET so that it support concurrent request on
2020 // YUV and JPEG.
2021 if (newStream->width <= VIDEO_4K_WIDTH &&
2022 newStream->height <= VIDEO_4K_HEIGHT) {
2023 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2024 }
2025 numStreamsOnEncoder++;
2026 numYuv888OnEncoder++;
2027 largeYuv888Size.width = newStream->width;
2028 largeYuv888Size.height = newStream->height;
2029 }
2030 break;
2031 default:
2032 processedStreamCnt++;
2033 if (isOnEncoder(maxViewfinderSize, newStream->width,
2034 newStream->height)) {
2035 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2036 numStreamsOnEncoder++;
2037 }
2038 break;
2039 }
2040
2041 }
2042 }
2043
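    // Disable EIS for front/front-aux sensors and for configurations without a video stream.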
2044 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2045 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2046 !m_bIsVideo) {
2047 m_bEisEnable = false;
2048 }
2049
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002050 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2051 pthread_mutex_unlock(&mMutex);
2052 return -EINVAL;
2053 }
2054
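    // debug.camera.tnr.forceenable forces TNR on even when the video TNR conditions below are not met.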
Thierry Strudel54dc9782017-02-15 12:12:10 -08002055 uint8_t forceEnableTnr = 0;
2056 char tnr_prop[PROPERTY_VALUE_MAX];
2057 memset(tnr_prop, 0, sizeof(tnr_prop));
2058 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2059 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2060
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 /* Logic to enable/disable TNR based on specific config size/etc.*/
2062 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002063 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2064 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002065 else if (forceEnableTnr)
2066 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002067
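    // Video HDR is enabled only when persist.camera.hdr.video is set, a video stream is
    // configured, and the mode is not constrained high-speed.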
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002068 char videoHdrProp[PROPERTY_VALUE_MAX];
2069 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2070 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2071 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2072
2073 if (hdr_mode_prop == 1 && m_bIsVideo &&
2074 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2075 m_bVideoHdrEnabled = true;
2076 else
2077 m_bVideoHdrEnabled = false;
2078
2079
Thierry Strudel3d639192016-09-09 11:52:26 -07002080 /* Check if num_streams is sane */
2081 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2082 rawStreamCnt > MAX_RAW_STREAMS ||
2083 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2084 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2085 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2086 pthread_mutex_unlock(&mMutex);
2087 return -EINVAL;
2088 }
2089 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002090 if (isZsl && m_bIs4KVideo) {
2091 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002092 pthread_mutex_unlock(&mMutex);
2093 return -EINVAL;
2094 }
2095 /* Check if stream sizes are sane */
2096 if (numStreamsOnEncoder > 2) {
2097 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2098 pthread_mutex_unlock(&mMutex);
2099 return -EINVAL;
2100 } else if (1 < numStreamsOnEncoder){
2101 bUseCommonFeatureMask = true;
2102 LOGH("Multiple streams above max viewfinder size, common mask needed");
2103 }
2104
2105 /* Check if BLOB size is greater than 4k in 4k recording case */
2106 if (m_bIs4KVideo && bJpegExceeds4K) {
2107 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2108 pthread_mutex_unlock(&mMutex);
2109 return -EINVAL;
2110 }
2111
Emilian Peev7650c122017-01-19 08:24:33 -08002112 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2113 depthPresent) {
2114 LOGE("HAL doesn't support depth streams in HFR mode!");
2115 pthread_mutex_unlock(&mMutex);
2116 return -EINVAL;
2117 }
2118
Thierry Strudel3d639192016-09-09 11:52:26 -07002119 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2120 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2121 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2122 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2123 // configurations:
2124 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2125 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2126 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2127 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2128 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2129 __func__);
2130 pthread_mutex_unlock(&mMutex);
2131 return -EINVAL;
2132 }
2133
2134 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2135 // the YUV stream's size is greater or equal to the JPEG size, set common
2136 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2137 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2138 jpegSize.width, jpegSize.height) &&
2139 largeYuv888Size.width > jpegSize.width &&
2140 largeYuv888Size.height > jpegSize.height) {
2141 bYuv888OverrideJpeg = true;
2142 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2143 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2144 }
2145
2146 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2147 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2148 commonFeatureMask);
2149 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2150 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2151
2152 rc = validateStreamDimensions(streamList);
2153 if (rc == NO_ERROR) {
2154 rc = validateStreamRotations(streamList);
2155 }
2156 if (rc != NO_ERROR) {
2157 LOGE("Invalid stream configuration requested!");
2158 pthread_mutex_unlock(&mMutex);
2159 return rc;
2160 }
2161
Emilian Peev0f3c3162017-03-15 12:57:46 +00002162 if (1 < pdStatCount) {
2163 LOGE("HAL doesn't support multiple PD streams");
2164 pthread_mutex_unlock(&mMutex);
2165 return -EINVAL;
2166 }
2167
2168 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2169 (1 == pdStatCount)) {
2170 LOGE("HAL doesn't support PD streams in HFR mode!");
2171 pthread_mutex_unlock(&mMutex);
2172 return -EINVAL;
2173 }
2174
Thierry Strudel3d639192016-09-09 11:52:26 -07002175 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2176 for (size_t i = 0; i < streamList->num_streams; i++) {
2177 camera3_stream_t *newStream = streamList->streams[i];
2178 LOGH("newStream type = %d, stream format = %d "
2179 "stream size : %d x %d, stream rotation = %d",
2180 newStream->stream_type, newStream->format,
2181 newStream->width, newStream->height, newStream->rotation);
2182 //if the stream is in the mStreamList validate it
2183 bool stream_exists = false;
2184 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2185 it != mStreamInfo.end(); it++) {
2186 if ((*it)->stream == newStream) {
2187 QCamera3ProcessingChannel *channel =
2188 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2189 stream_exists = true;
2190 if (channel)
2191 delete channel;
2192 (*it)->status = VALID;
2193 (*it)->stream->priv = NULL;
2194 (*it)->channel = NULL;
2195 }
2196 }
2197 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2198 //new stream
2199 stream_info_t* stream_info;
2200 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2201 if (!stream_info) {
2202 LOGE("Could not allocate stream info");
2203 rc = -ENOMEM;
2204 pthread_mutex_unlock(&mMutex);
2205 return rc;
2206 }
2207 stream_info->stream = newStream;
2208 stream_info->status = VALID;
2209 stream_info->channel = NULL;
2210 mStreamInfo.push_back(stream_info);
2211 }
2212 /* Covers Opaque ZSL and API1 F/W ZSL */
2213 if (IS_USAGE_ZSL(newStream->usage)
2214 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2215 if (zslStream != NULL) {
2216 LOGE("Multiple input/reprocess streams requested!");
2217 pthread_mutex_unlock(&mMutex);
2218 return BAD_VALUE;
2219 }
2220 zslStream = newStream;
2221 }
2222 /* Covers YUV reprocess */
2223 if (inputStream != NULL) {
2224 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2225 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2226 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2227 && inputStream->width == newStream->width
2228 && inputStream->height == newStream->height) {
2229 if (zslStream != NULL) {
2230 /* This scenario indicates multiple YUV streams with the same size
2231 * as the input stream have been requested. Since the zsl stream handle
2232 * is solely used for the purpose of overriding the size of streams
2233 * which share h/w streams, we will just make a guess here as to
2234 * which of the streams is the ZSL stream; this will be refactored
2235 * once we have generic logic for streams sharing encoder output
2236 */
2237 LOGH("Warning, Multiple ip/reprocess streams requested!");
2238 }
2239 zslStream = newStream;
2240 }
2241 }
2242 }
2243
2244 /* If a zsl stream is set, we know that we have configured at least one input or
2245 bidirectional stream */
2246 if (NULL != zslStream) {
2247 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2248 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2249 mInputStreamInfo.format = zslStream->format;
2250 mInputStreamInfo.usage = zslStream->usage;
2251 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2252 mInputStreamInfo.dim.width,
2253 mInputStreamInfo.dim.height,
2254 mInputStreamInfo.format, mInputStreamInfo.usage);
2255 }
2256
2257 cleanAndSortStreamInfo();
2258 if (mMetadataChannel) {
2259 delete mMetadataChannel;
2260 mMetadataChannel = NULL;
2261 }
2262 if (mSupportChannel) {
2263 delete mSupportChannel;
2264 mSupportChannel = NULL;
2265 }
2266
2267 if (mAnalysisChannel) {
2268 delete mAnalysisChannel;
2269 mAnalysisChannel = NULL;
2270 }
2271
2272 if (mDummyBatchChannel) {
2273 delete mDummyBatchChannel;
2274 mDummyBatchChannel = NULL;
2275 }
2276
Emilian Peev7650c122017-01-19 08:24:33 -08002277 if (mDepthChannel) {
2278 mDepthChannel = NULL;
2279 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002280 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002281
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002282 mShutterDispatcher.clear();
2283 mOutputBufferDispatcher.clear();
2284
Thierry Strudel2896d122017-02-23 19:18:03 -08002285 char is_type_value[PROPERTY_VALUE_MAX];
2286 property_get("persist.camera.is_type", is_type_value, "4");
2287 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2288
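    // persist.camera.gzoom.at: bit 0 enables Google zoom on the video stream, bit 1 on preview
    // streams; both apply only to the back camera. persist.camera.gzoom.4k additionally allows
    // it for 4K video.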
Binhao Line406f062017-05-03 14:39:44 -07002289 char property_value[PROPERTY_VALUE_MAX];
2290 property_get("persist.camera.gzoom.at", property_value, "0");
2291 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002292 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2293 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2294 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2295 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002296
2297 property_get("persist.camera.gzoom.4k", property_value, "0");
2298 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2299
Thierry Strudel3d639192016-09-09 11:52:26 -07002300 //Create metadata channel and initialize it
2301 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2302 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2303 gCamCapability[mCameraId]->color_arrangement);
2304 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2305 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002306 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002307 if (mMetadataChannel == NULL) {
2308 LOGE("failed to allocate metadata channel");
2309 rc = -ENOMEM;
2310 pthread_mutex_unlock(&mMutex);
2311 return rc;
2312 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002313 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2315 if (rc < 0) {
2316 LOGE("metadata channel initialization failed");
2317 delete mMetadataChannel;
2318 mMetadataChannel = NULL;
2319 pthread_mutex_unlock(&mMutex);
2320 return rc;
2321 }
2322
Thierry Strudel2896d122017-02-23 19:18:03 -08002323 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002324 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002325 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002326 // Keep track of preview/video streams indices.
2327 // There could be more than one preview streams, but only one video stream.
2328 int32_t video_stream_idx = -1;
2329 int32_t preview_stream_idx[streamList->num_streams];
2330 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002331 bool previewTnr[streamList->num_streams];
2332 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2333 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2334 // Loop through once to determine preview TNR conditions before creating channels.
2335 for (size_t i = 0; i < streamList->num_streams; i++) {
2336 camera3_stream_t *newStream = streamList->streams[i];
2337 uint32_t stream_usage = newStream->usage;
2338 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2339 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2340 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2341 video_stream_idx = (int32_t)i;
2342 else
2343 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2344 }
2345 }
2346 // By default, preview stream TNR is disabled.
2347 // Enable TNR to the preview stream if all conditions below are satisfied:
2348 // 1. preview resolution == video resolution.
2349 // 2. video stream TNR is enabled.
2350 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2351 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2352 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2353 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2354 if (m_bTnrEnabled && m_bTnrVideo &&
2355 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2356 video_stream->width == preview_stream->width &&
2357 video_stream->height == preview_stream->height) {
2358 previewTnr[preview_stream_idx[i]] = true;
2359 }
2360 }
2361
Thierry Strudel3d639192016-09-09 11:52:26 -07002362 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2363 /* Allocate channel objects for the requested streams */
2364 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002365
Thierry Strudel3d639192016-09-09 11:52:26 -07002366 camera3_stream_t *newStream = streamList->streams[i];
2367 uint32_t stream_usage = newStream->usage;
2368 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2369 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2370 struct camera_info *p_info = NULL;
2371 pthread_mutex_lock(&gCamLock);
2372 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2373 pthread_mutex_unlock(&gCamLock);
2374 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2375 || IS_USAGE_ZSL(newStream->usage)) &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002377 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002378 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002379 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2380 if (bUseCommonFeatureMask)
2381 zsl_ppmask = commonFeatureMask;
2382 else
2383 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002384 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002385 if (numStreamsOnEncoder > 0)
2386 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2387 else
2388 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002392 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 LOGH("Input stream configured, reprocess config");
2394 } else {
2395 //for non zsl streams find out the format
2396 switch (newStream->format) {
2397 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2398 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002399 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2401 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2402 /* add additional features to pp feature mask */
2403 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2404 mStreamConfigInfo.num_streams);
2405
2406 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2407 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2408 CAM_STREAM_TYPE_VIDEO;
2409 if (m_bTnrEnabled && m_bTnrVideo) {
2410 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2411 CAM_QCOM_FEATURE_CPP_TNR;
2412 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2413 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2414 ~CAM_QCOM_FEATURE_CDS;
2415 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2417 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2418 CAM_QTI_FEATURE_PPEISCORE;
2419 }
Binhao Line406f062017-05-03 14:39:44 -07002420 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2421 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2422 CAM_QCOM_FEATURE_GOOG_ZOOM;
2423 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002424 } else {
2425 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2426 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002427 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2429 CAM_QCOM_FEATURE_CPP_TNR;
2430 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2431 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2432 ~CAM_QCOM_FEATURE_CDS;
2433 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002434 if(!m_bSwTnrPreview) {
2435 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2436 ~CAM_QTI_FEATURE_SW_TNR;
2437 }
Binhao Line406f062017-05-03 14:39:44 -07002438 if (is_goog_zoom_preview_enabled) {
2439 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2440 CAM_QCOM_FEATURE_GOOG_ZOOM;
2441 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002442 padding_info.width_padding = mSurfaceStridePadding;
2443 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002444 previewSize.width = (int32_t)newStream->width;
2445 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002446 }
2447 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2448 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2449 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2450 newStream->height;
2451 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2452 newStream->width;
2453 }
2454 }
2455 break;
2456 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002457 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002458 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2459 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2460 if (bUseCommonFeatureMask)
2461 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2462 commonFeatureMask;
2463 else
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2465 CAM_QCOM_FEATURE_NONE;
2466 } else {
2467 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2468 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2469 }
2470 break;
2471 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002472 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002473 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2474 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2475 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2477 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002478 /* Remove rotation if it is not supported
2479 for 4K LiveVideo snapshot case (online processing) */
2480 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2481 CAM_QCOM_FEATURE_ROTATION)) {
2482 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2483 &= ~CAM_QCOM_FEATURE_ROTATION;
2484 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002485 } else {
2486 if (bUseCommonFeatureMask &&
2487 isOnEncoder(maxViewfinderSize, newStream->width,
2488 newStream->height)) {
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2490 } else {
2491 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2492 }
2493 }
2494 if (isZsl) {
2495 if (zslStream) {
2496 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2497 (int32_t)zslStream->width;
2498 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2499 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002500 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2501 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002502 } else {
2503 LOGE("Error, No ZSL stream identified");
2504 pthread_mutex_unlock(&mMutex);
2505 return -EINVAL;
2506 }
2507 } else if (m_bIs4KVideo) {
2508 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2510 } else if (bYuv888OverrideJpeg) {
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2512 (int32_t)largeYuv888Size.width;
2513 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2514 (int32_t)largeYuv888Size.height;
2515 }
2516 break;
2517 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2518 case HAL_PIXEL_FORMAT_RAW16:
2519 case HAL_PIXEL_FORMAT_RAW10:
2520 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2521 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2522 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002523 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2524 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2525 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2526 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2527 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2528 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2529 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2530 gCamCapability[mCameraId]->dt[mPDIndex];
2531 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2532 gCamCapability[mCameraId]->vc[mPDIndex];
2533 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002534 break;
2535 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002536 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002537 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2538 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2539 break;
2540 }
2541 }
2542
2543 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2544 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2545 gCamCapability[mCameraId]->color_arrangement);
2546
2547 if (newStream->priv == NULL) {
2548 //New stream, construct channel
2549 switch (newStream->stream_type) {
2550 case CAMERA3_STREAM_INPUT:
2551 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2552 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2553 break;
2554 case CAMERA3_STREAM_BIDIRECTIONAL:
2555 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2556 GRALLOC_USAGE_HW_CAMERA_WRITE;
2557 break;
2558 case CAMERA3_STREAM_OUTPUT:
2559 /* For video encoding streams, set the read/write-rarely
2560 * flags so that the buffers may be allocated un-cached */
2561 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2562 newStream->usage |=
2563 (GRALLOC_USAGE_SW_READ_RARELY |
2564 GRALLOC_USAGE_SW_WRITE_RARELY |
2565 GRALLOC_USAGE_HW_CAMERA_WRITE);
2566 else if (IS_USAGE_ZSL(newStream->usage))
2567 {
2568 LOGD("ZSL usage flag skipping");
2569 }
2570 else if (newStream == zslStream
2571 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2572 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2573 } else
2574 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2575 break;
2576 default:
2577 LOGE("Invalid stream_type %d", newStream->stream_type);
2578 break;
2579 }
2580
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002581 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002582 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2583 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2584 QCamera3ProcessingChannel *channel = NULL;
2585 switch (newStream->format) {
2586 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2587 if ((newStream->usage &
2588 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2589 (streamList->operation_mode ==
2590 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2591 ) {
2592 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2593 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002594 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002595 this,
2596 newStream,
2597 (cam_stream_type_t)
2598 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2599 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2600 mMetadataChannel,
2601 0); //heap buffers are not required for HFR video channel
2602 if (channel == NULL) {
2603 LOGE("allocation of channel failed");
2604 pthread_mutex_unlock(&mMutex);
2605 return -ENOMEM;
2606 }
2607 //channel->getNumBuffers() will return 0 here so use
2608 //MAX_INFLIGHT_HFR_REQUESTS
2609 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2610 newStream->priv = channel;
2611 LOGI("num video buffers in HFR mode: %d",
2612 MAX_INFLIGHT_HFR_REQUESTS);
2613 } else {
2614 /* Copy stream contents in the HFR preview-only case to create a
2615 * dummy batch channel so that sensor streaming is in
2616 * HFR mode */
2617 if (!m_bIsVideo && (streamList->operation_mode ==
2618 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2619 mDummyBatchStream = *newStream;
2620 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002621 int bufferCount = MAX_INFLIGHT_REQUESTS;
2622 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2623 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002624 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2625 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2626 bufferCount = m_bIs4KVideo ?
2627 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2628 }
2629
Thierry Strudel2896d122017-02-23 19:18:03 -08002630 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2632 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002633 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002634 this,
2635 newStream,
2636 (cam_stream_type_t)
2637 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2638 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2639 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002640 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002641 if (channel == NULL) {
2642 LOGE("allocation of channel failed");
2643 pthread_mutex_unlock(&mMutex);
2644 return -ENOMEM;
2645 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002646 /* disable UBWC for preview, though supported,
2647 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002648 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002649 (previewSize.width == (int32_t)videoWidth)&&
2650 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002651 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002652 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002653 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002654 /* When goog_zoom is linked to the preview or video stream,
2655 * disable UBWC for the linked stream */
2656 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2657 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2658 channel->setUBWCEnabled(false);
2659 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002660 newStream->max_buffers = channel->getNumBuffers();
2661 newStream->priv = channel;
2662 }
2663 break;
2664 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2665 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2666 mChannelHandle,
2667 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002668 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002669 this,
2670 newStream,
2671 (cam_stream_type_t)
2672 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2673 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2674 mMetadataChannel);
2675 if (channel == NULL) {
2676 LOGE("allocation of YUV channel failed");
2677 pthread_mutex_unlock(&mMutex);
2678 return -ENOMEM;
2679 }
2680 newStream->max_buffers = channel->getNumBuffers();
2681 newStream->priv = channel;
2682 break;
2683 }
2684 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2685 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002686 case HAL_PIXEL_FORMAT_RAW10: {
2687 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2688 (HAL_DATASPACE_DEPTH != newStream->data_space))
2689 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002690 mRawChannel = new QCamera3RawChannel(
2691 mCameraHandle->camera_handle, mChannelHandle,
2692 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002693 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002694 this, newStream,
2695 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002696 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 if (mRawChannel == NULL) {
2698 LOGE("allocation of raw channel failed");
2699 pthread_mutex_unlock(&mMutex);
2700 return -ENOMEM;
2701 }
2702 newStream->max_buffers = mRawChannel->getNumBuffers();
2703 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2704 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002707 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2708 mDepthChannel = new QCamera3DepthChannel(
2709 mCameraHandle->camera_handle, mChannelHandle,
2710 mCameraHandle->ops, NULL, NULL, &padding_info,
2711 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2712 mMetadataChannel);
2713 if (NULL == mDepthChannel) {
2714 LOGE("Allocation of depth channel failed");
2715 pthread_mutex_unlock(&mMutex);
2716 return NO_MEMORY;
2717 }
2718 newStream->priv = mDepthChannel;
2719 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2720 } else {
2721 // Max live snapshot inflight buffer count is 1. This is to mitigate
2722 // frame drop issues for video snapshot: the more buffers
2723 // allocated, the more frames are dropped.
2724 mPictureChannel = new QCamera3PicChannel(
2725 mCameraHandle->camera_handle, mChannelHandle,
2726 mCameraHandle->ops, captureResultCb,
2727 setBufferErrorStatus, &padding_info, this, newStream,
2728 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2729 m_bIs4KVideo, isZsl, mMetadataChannel,
2730 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2731 if (mPictureChannel == NULL) {
2732 LOGE("allocation of channel failed");
2733 pthread_mutex_unlock(&mMutex);
2734 return -ENOMEM;
2735 }
2736 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2737 newStream->max_buffers = mPictureChannel->getNumBuffers();
2738 mPictureChannel->overrideYuvSize(
2739 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2740 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002741 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002742 break;
2743
2744 default:
2745 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002746 pthread_mutex_unlock(&mMutex);
2747 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002748 }
2749 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2750 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2751 } else {
2752 LOGE("Error, Unknown stream type");
2753 pthread_mutex_unlock(&mMutex);
2754 return -EINVAL;
2755 }
2756
2757 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002758 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002759 // Here we only care whether it's EIS3 or not
2760 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2761 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2762 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2763 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002764 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002765 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002766 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002767 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2768 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2769 }
2770 }
2771
2772 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2773 it != mStreamInfo.end(); it++) {
2774 if ((*it)->stream == newStream) {
2775 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2776 break;
2777 }
2778 }
2779 } else {
2780 // Channel already exists for this stream
2781 // Do nothing for now
2782 }
2783 padding_info = gCamCapability[mCameraId]->padding_info;
2784
Emilian Peev7650c122017-01-19 08:24:33 -08002785 /* Do not add entries for the input & depth streams in the meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002786 * since there is no real stream associated with them
2787 */
Emilian Peev7650c122017-01-19 08:24:33 -08002788 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002789 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2790 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002792 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002793 }
2794
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002795 // Let the buffer dispatcher know about the configured streams.
2796 mOutputBufferDispatcher.configureStreams(streamList);
2797
Thierry Strudel2896d122017-02-23 19:18:03 -08002798 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2799 onlyRaw = false;
2800 }
2801
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002802 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002803 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002804 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002805 cam_analysis_info_t analysisInfo;
2806 int32_t ret = NO_ERROR;
2807 ret = mCommon.getAnalysisInfo(
2808 FALSE,
2809 analysisFeatureMask,
2810 &analysisInfo);
2811 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002812 cam_color_filter_arrangement_t analysis_color_arrangement =
2813 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2814 CAM_FILTER_ARRANGEMENT_Y :
2815 gCamCapability[mCameraId]->color_arrangement);
2816 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2817 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 cam_dimension_t analysisDim;
2819 analysisDim = mCommon.getMatchingDimension(previewSize,
2820 analysisInfo.analysis_recommended_res);
2821
2822 mAnalysisChannel = new QCamera3SupportChannel(
2823 mCameraHandle->camera_handle,
2824 mChannelHandle,
2825 mCameraHandle->ops,
2826 &analysisInfo.analysis_padding_info,
2827 analysisFeatureMask,
2828 CAM_STREAM_TYPE_ANALYSIS,
2829 &analysisDim,
2830 (analysisInfo.analysis_format
2831 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2832 : CAM_FORMAT_YUV_420_NV21),
2833 analysisInfo.hw_analysis_supported,
2834 gCamCapability[mCameraId]->color_arrangement,
2835 this,
2836 0); // force buffer count to 0
2837 } else {
2838 LOGW("getAnalysisInfo failed, ret = %d", ret);
2839 }
2840 if (!mAnalysisChannel) {
2841 LOGW("Analysis channel cannot be created");
2842 }
2843 }
2844
Thierry Strudel3d639192016-09-09 11:52:26 -07002845 //RAW DUMP channel
2846 if (mEnableRawDump && isRawStreamRequested == false){
2847 cam_dimension_t rawDumpSize;
2848 rawDumpSize = getMaxRawSize(mCameraId);
2849 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2850 setPAAFSupport(rawDumpFeatureMask,
2851 CAM_STREAM_TYPE_RAW,
2852 gCamCapability[mCameraId]->color_arrangement);
2853 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2854 mChannelHandle,
2855 mCameraHandle->ops,
2856 rawDumpSize,
2857 &padding_info,
2858 this, rawDumpFeatureMask);
2859 if (!mRawDumpChannel) {
2860 LOGE("Raw Dump channel cannot be created");
2861 pthread_mutex_unlock(&mMutex);
2862 return -ENOMEM;
2863 }
2864 }
2865
Thierry Strudel3d639192016-09-09 11:52:26 -07002866 if (mAnalysisChannel) {
2867 cam_analysis_info_t analysisInfo;
2868 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2869 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2870 CAM_STREAM_TYPE_ANALYSIS;
2871 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2872 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002873 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002874 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2875 &analysisInfo);
2876 if (rc != NO_ERROR) {
2877 LOGE("getAnalysisInfo failed, ret = %d", rc);
2878 pthread_mutex_unlock(&mMutex);
2879 return rc;
2880 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002881 cam_color_filter_arrangement_t analysis_color_arrangement =
2882 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2883 CAM_FILTER_ARRANGEMENT_Y :
2884 gCamCapability[mCameraId]->color_arrangement);
2885 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2886 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2887 analysis_color_arrangement);
2888
Thierry Strudel3d639192016-09-09 11:52:26 -07002889 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002890 mCommon.getMatchingDimension(previewSize,
2891 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002892 mStreamConfigInfo.num_streams++;
2893 }
2894
Thierry Strudel2896d122017-02-23 19:18:03 -08002895 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002896 cam_analysis_info_t supportInfo;
2897 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2898 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2899 setPAAFSupport(callbackFeatureMask,
2900 CAM_STREAM_TYPE_CALLBACK,
2901 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002902 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002903 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002904 if (ret != NO_ERROR) {
2905 /* Ignore the error for Mono camera
2906 * because the PAAF bit mask is only set
2907 * for CAM_STREAM_TYPE_ANALYSIS stream type
2908 */
2909 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2910 LOGW("getAnalysisInfo failed, ret = %d", ret);
2911 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002912 }
2913 mSupportChannel = new QCamera3SupportChannel(
2914 mCameraHandle->camera_handle,
2915 mChannelHandle,
2916 mCameraHandle->ops,
2917 &gCamCapability[mCameraId]->padding_info,
2918 callbackFeatureMask,
2919 CAM_STREAM_TYPE_CALLBACK,
2920 &QCamera3SupportChannel::kDim,
2921 CAM_FORMAT_YUV_420_NV21,
2922 supportInfo.hw_analysis_supported,
2923 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002924 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002925 if (!mSupportChannel) {
2926 LOGE("dummy channel cannot be created");
2927 pthread_mutex_unlock(&mMutex);
2928 return -ENOMEM;
2929 }
2930 }
2931
2932 if (mSupportChannel) {
2933 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2934 QCamera3SupportChannel::kDim;
2935 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2936 CAM_STREAM_TYPE_CALLBACK;
2937 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2938 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2939 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2940 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2941 gCamCapability[mCameraId]->color_arrangement);
2942 mStreamConfigInfo.num_streams++;
2943 }
2944
2945 if (mRawDumpChannel) {
2946 cam_dimension_t rawSize;
2947 rawSize = getMaxRawSize(mCameraId);
2948 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2949 rawSize;
2950 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2951 CAM_STREAM_TYPE_RAW;
2952 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2953 CAM_QCOM_FEATURE_NONE;
2954 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2955 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2956 gCamCapability[mCameraId]->color_arrangement);
2957 mStreamConfigInfo.num_streams++;
2958 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002959
2960 if (mHdrPlusRawSrcChannel) {
2961 cam_dimension_t rawSize;
2962 rawSize = getMaxRawSize(mCameraId);
2963 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2964 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2965 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2966 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2967 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2968 gCamCapability[mCameraId]->color_arrangement);
2969 mStreamConfigInfo.num_streams++;
2970 }
2971
Thierry Strudel3d639192016-09-09 11:52:26 -07002972 /* In HFR mode, if no video stream is added, create a dummy channel so that
2973 * the ISP can use batch mode even for the preview-only case. This channel is
2974 * never 'start'ed (no stream-on); it is only 'initialized' */
2975 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2976 !m_bIsVideo) {
2977 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2978 setPAAFSupport(dummyFeatureMask,
2979 CAM_STREAM_TYPE_VIDEO,
2980 gCamCapability[mCameraId]->color_arrangement);
2981 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2982 mChannelHandle,
2983 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002984 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002985 this,
2986 &mDummyBatchStream,
2987 CAM_STREAM_TYPE_VIDEO,
2988 dummyFeatureMask,
2989 mMetadataChannel);
2990 if (NULL == mDummyBatchChannel) {
2991 LOGE("creation of mDummyBatchChannel failed."
2992 "Preview will use non-hfr sensor mode ");
2993 }
2994 }
2995 if (mDummyBatchChannel) {
2996 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2997 mDummyBatchStream.width;
2998 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2999 mDummyBatchStream.height;
3000 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3001 CAM_STREAM_TYPE_VIDEO;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3003 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3004 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3005 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3006 gCamCapability[mCameraId]->color_arrangement);
3007 mStreamConfigInfo.num_streams++;
3008 }
3009
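    // Summary of the selection below: the advertised max in-flight buffer count is 0 for
    // 4K video, MAX_VIDEO_BUFFERS when EIS3 is enabled for a video use case, and
    // MAX_INFLIGHT_REQUESTS otherwise.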
3010 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3011 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003012 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003013 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003014
3015 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3016 for (pendingRequestIterator i = mPendingRequestsList.begin();
3017 i != mPendingRequestsList.end();) {
3018 i = erasePendingRequest(i);
3019 }
3020 mPendingFrameDropList.clear();
3021 // Initialize/Reset the pending buffers list
3022 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3023 req.mPendingBufferList.clear();
3024 }
3025 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3026
Thierry Strudel3d639192016-09-09 11:52:26 -07003027 mCurJpegMeta.clear();
3028 //Get min frame duration for this streams configuration
3029 deriveMinFrameDuration();
3030
Chien-Yu Chenee335912017-02-09 17:53:20 -08003031 mFirstPreviewIntentSeen = false;
3032
3033 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003034 {
3035 Mutex::Autolock l(gHdrPlusClientLock);
3036 disableHdrPlusModeLocked();
3037 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003038
Thierry Strudel3d639192016-09-09 11:52:26 -07003039 // Update state
3040 mState = CONFIGURED;
3041
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003042 mFirstMetadataCallback = true;
3043
Thierry Strudel3d639192016-09-09 11:52:26 -07003044 pthread_mutex_unlock(&mMutex);
3045
3046 return rc;
3047}
3048
3049/*===========================================================================
3050 * FUNCTION : validateCaptureRequest
3051 *
3052 * DESCRIPTION: validate a capture request from camera service
3053 *
3054 * PARAMETERS :
3055 * @request : request from framework to process
3056 *
3057 * RETURN :
3058 *
3059 *==========================================================================*/
3060int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003061 camera3_capture_request_t *request,
3062 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003063{
3064 ssize_t idx = 0;
3065 const camera3_stream_buffer_t *b;
3066 CameraMetadata meta;
3067
3068 /* Sanity check the request */
3069 if (request == NULL) {
3070 LOGE("NULL capture request");
3071 return BAD_VALUE;
3072 }
3073
3074 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3075 /*settings cannot be null for the first request*/
3076 return BAD_VALUE;
3077 }
3078
3079 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003080 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3081 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003082 LOGE("Request %d: No output buffers provided!",
3083 frameNumber);
3084 return BAD_VALUE;
3085 }
3086 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3087 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3088 request->num_output_buffers, MAX_NUM_STREAMS);
3089 return BAD_VALUE;
3090 }
3091 if (request->input_buffer != NULL) {
3092 b = request->input_buffer;
3093 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3094 LOGE("Request %d: Buffer %ld: Status not OK!",
3095 frameNumber, (long)idx);
3096 return BAD_VALUE;
3097 }
3098 if (b->release_fence != -1) {
3099 LOGE("Request %d: Buffer %ld: Has a release fence!",
3100 frameNumber, (long)idx);
3101 return BAD_VALUE;
3102 }
3103 if (b->buffer == NULL) {
3104 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3105 frameNumber, (long)idx);
3106 return BAD_VALUE;
3107 }
3108 }
3109
3110 // Validate all buffers
3111 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003112 if (b == NULL) {
3113 return BAD_VALUE;
3114 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003115 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003116 QCamera3ProcessingChannel *channel =
3117 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3118 if (channel == NULL) {
3119 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3120 frameNumber, (long)idx);
3121 return BAD_VALUE;
3122 }
3123 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3124 LOGE("Request %d: Buffer %ld: Status not OK!",
3125 frameNumber, (long)idx);
3126 return BAD_VALUE;
3127 }
3128 if (b->release_fence != -1) {
3129 LOGE("Request %d: Buffer %ld: Has a release fence!",
3130 frameNumber, (long)idx);
3131 return BAD_VALUE;
3132 }
3133 if (b->buffer == NULL) {
3134 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3135 frameNumber, (long)idx);
3136 return BAD_VALUE;
3137 }
3138 if (*(b->buffer) == NULL) {
3139 LOGE("Request %d: Buffer %ld: NULL private handle!",
3140 frameNumber, (long)idx);
3141 return BAD_VALUE;
3142 }
3143 idx++;
3144 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003145 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003146 return NO_ERROR;
3147}
3148
3149/*===========================================================================
3150 * FUNCTION : deriveMinFrameDuration
3151 *
3152 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3153 * on currently configured streams.
3154 *
3155 * PARAMETERS : NONE
3156 *
3157 * RETURN : NONE
3158 *
3159 *==========================================================================*/
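/* Worked example (hypothetical sizes, for illustration only): with a 4032x3024 BLOB
 * stream and a 1920x1080 preview, the JPEG area exceeds the processed area, so
 * maxProcessedDim is promoted to the JPEG area and mMinProcessedFrameDuration /
 * mMinJpegFrameDuration are both looked up against that picture size. If a RAW stream
 * whose area is smaller than the JPEG area is also configured, the smallest sensor RAW
 * size with an area >= the JPEG area is used for mMinRawFrameDuration instead. */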
3160void QCamera3HardwareInterface::deriveMinFrameDuration()
3161{
3162 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003163 bool hasRaw = false;
3164
3165 mMinRawFrameDuration = 0;
3166 mMinJpegFrameDuration = 0;
3167 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003168
3169 maxJpegDim = 0;
3170 maxProcessedDim = 0;
3171 maxRawDim = 0;
3172
3173 // Figure out maximum jpeg, processed, and raw dimensions
3174 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3175 it != mStreamInfo.end(); it++) {
3176
3177 // Input stream doesn't have valid stream_type
3178 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3179 continue;
3180
3181 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3182 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3183 if (dimension > maxJpegDim)
3184 maxJpegDim = dimension;
3185 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3186 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3187 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003188 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003189 if (dimension > maxRawDim)
3190 maxRawDim = dimension;
3191 } else {
3192 if (dimension > maxProcessedDim)
3193 maxProcessedDim = dimension;
3194 }
3195 }
3196
3197 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3198 MAX_SIZES_CNT);
3199
3200 //Assume all jpeg dimensions are in processed dimensions.
3201 if (maxJpegDim > maxProcessedDim)
3202 maxProcessedDim = maxJpegDim;
3203 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003204 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003205 maxRawDim = INT32_MAX;
3206
3207 for (size_t i = 0; i < count; i++) {
3208 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3209 gCamCapability[mCameraId]->raw_dim[i].height;
3210 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3211 maxRawDim = dimension;
3212 }
3213 }
3214
3215 //Find minimum durations for processed, jpeg, and raw
3216 for (size_t i = 0; i < count; i++) {
3217 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3218 gCamCapability[mCameraId]->raw_dim[i].height) {
3219 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3220 break;
3221 }
3222 }
3223 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3224 for (size_t i = 0; i < count; i++) {
3225 if (maxProcessedDim ==
3226 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3227 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3228 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3229 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3230 break;
3231 }
3232 }
3233}
3234
3235/*===========================================================================
3236 * FUNCTION : getMinFrameDuration
3237 *
3238 * DESCRIPTION: get minimum frame duration based on the current minimum frame durations
3239 * and current request configuration.
3240 *
3241 * PARAMETERS : @request: request sent by the framework
3242 *
3243 * RETURN : min frame duration for a particular request
3244 *
3245 *==========================================================================*/
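/* Illustration (assumed request contents): a request with only a preview buffer returns
 * MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once a BLOB (JPEG) buffer is
 * part of the request, mMinJpegFrameDuration is folded into the MAX as well, so requests
 * carrying a JPEG capture are paced no faster than the JPEG minimum duration. */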
3246int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3247{
3248 bool hasJpegStream = false;
3249 bool hasRawStream = false;
3250 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3251 const camera3_stream_t *stream = request->output_buffers[i].stream;
3252 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3253 hasJpegStream = true;
3254 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3255 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3256 stream->format == HAL_PIXEL_FORMAT_RAW16)
3257 hasRawStream = true;
3258 }
3259
3260 if (!hasJpegStream)
3261 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3262 else
3263 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3264}
3265
3266/*===========================================================================
3267 * FUNCTION : handleBuffersDuringFlushLock
3268 *
3269 * DESCRIPTION: Account for buffers returned from back-end during flush
3270 * This function is executed while mMutex is held by the caller.
3271 *
3272 * PARAMETERS :
3273 * @buffer: image buffer for the callback
3274 *
3275 * RETURN :
3276 *==========================================================================*/
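/* Each buffer matched below decrements numPendingBufsAtFlush; once the count drops to
 * zero, mBuffersCond is signalled so the flush() waiting on it can proceed. */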
3277void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3278{
3279 bool buffer_found = false;
3280 for (List<PendingBuffersInRequest>::iterator req =
3281 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3282 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3283 for (List<PendingBufferInfo>::iterator i =
3284 req->mPendingBufferList.begin();
3285 i != req->mPendingBufferList.end(); i++) {
3286 if (i->buffer == buffer->buffer) {
3287 mPendingBuffersMap.numPendingBufsAtFlush--;
3288 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3289 buffer->buffer, req->frame_number,
3290 mPendingBuffersMap.numPendingBufsAtFlush);
3291 buffer_found = true;
3292 break;
3293 }
3294 }
3295 if (buffer_found) {
3296 break;
3297 }
3298 }
3299 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3300 //signal the flush()
3301 LOGD("All buffers returned to HAL. Continue flush");
3302 pthread_cond_signal(&mBuffersCond);
3303 }
3304}
3305
Thierry Strudel3d639192016-09-09 11:52:26 -07003306/*===========================================================================
3307 * FUNCTION : handleBatchMetadata
3308 *
3309 * DESCRIPTION: Handles metadata buffer callback in batch mode
3310 *
3311 * PARAMETERS : @metadata_buf: metadata buffer
3312 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3313 * the meta buf in this method
3314 *
3315 * RETURN :
3316 *
3317 *==========================================================================*/
3318void QCamera3HardwareInterface::handleBatchMetadata(
3319 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3320{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003321 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003322
3323 if (NULL == metadata_buf) {
3324 LOGE("metadata_buf is NULL");
3325 return;
3326 }
3327 /* In batch mode, the metadata will contain the frame number and timestamp of
3328 * the last frame in the batch. Eg: a batch containing buffers from requests
3329 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3330 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3331 * multiple process_capture_results */
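    /* Worked example (hypothetical numbers, for illustration): for a batch covering frame
     * numbers 5..8 the metadata reports last_frame_number = 8. With first_frame_number = 5,
     * frameNumDiff = 8 + 1 - 5 = 4, so the loop below emits four results with inferred
     * frame numbers 5, 6, 7 and 8. Timestamps are interpolated the same way:
     * first_frame_capture_time = last_frame_capture_time - 3 * (NSEC_PER_SEC / mHFRVideoFps),
     * and each subsequent result adds one frame period. */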
3332 metadata_buffer_t *metadata =
3333 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3334 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3335 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3336 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3337 uint32_t frame_number = 0, urgent_frame_number = 0;
3338 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3339 bool invalid_metadata = false;
3340 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3341 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003342 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003343
3344 int32_t *p_frame_number_valid =
3345 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3346 uint32_t *p_frame_number =
3347 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3348 int64_t *p_capture_time =
3349 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3350 int32_t *p_urgent_frame_number_valid =
3351 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3352 uint32_t *p_urgent_frame_number =
3353 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3354
3355 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3356 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3357 (NULL == p_urgent_frame_number)) {
3358 LOGE("Invalid metadata");
3359 invalid_metadata = true;
3360 } else {
3361 frame_number_valid = *p_frame_number_valid;
3362 last_frame_number = *p_frame_number;
3363 last_frame_capture_time = *p_capture_time;
3364 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3365 last_urgent_frame_number = *p_urgent_frame_number;
3366 }
3367
3368 /* In batch mode, when no video buffers are requested, set_parms are sent
3369 * for every capture_request. The difference between consecutive urgent
3370 * frame numbers and frame numbers should be used to interpolate the
3371 * corresponding frame numbers and time stamps */
3372 pthread_mutex_lock(&mMutex);
3373 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003374 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3375 if(idx < 0) {
3376 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3377 last_urgent_frame_number);
3378 mState = ERROR;
3379 pthread_mutex_unlock(&mMutex);
3380 return;
3381 }
3382 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003383 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3384 first_urgent_frame_number;
3385
3386 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3387 urgent_frame_number_valid,
3388 first_urgent_frame_number, last_urgent_frame_number);
3389 }
3390
3391 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003392 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3393 if(idx < 0) {
3394 LOGE("Invalid frame number received: %d. Irrecoverable error",
3395 last_frame_number);
3396 mState = ERROR;
3397 pthread_mutex_unlock(&mMutex);
3398 return;
3399 }
3400 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003401 frameNumDiff = last_frame_number + 1 -
3402 first_frame_number;
3403 mPendingBatchMap.removeItem(last_frame_number);
3404
3405 LOGD("frm: valid: %d frm_num: %d - %d",
3406 frame_number_valid,
3407 first_frame_number, last_frame_number);
3408
3409 }
3410 pthread_mutex_unlock(&mMutex);
3411
3412 if (urgent_frame_number_valid || frame_number_valid) {
3413 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3414 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3415 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3416 urgentFrameNumDiff, last_urgent_frame_number);
3417 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3418 LOGE("frameNumDiff: %d frameNum: %d",
3419 frameNumDiff, last_frame_number);
3420 }
3421
3422 for (size_t i = 0; i < loopCount; i++) {
3423 /* handleMetadataWithLock is called even for invalid_metadata for
3424 * pipeline depth calculation */
3425 if (!invalid_metadata) {
3426 /* Infer frame number. Batch metadata contains frame number of the
3427 * last frame */
3428 if (urgent_frame_number_valid) {
3429 if (i < urgentFrameNumDiff) {
3430 urgent_frame_number =
3431 first_urgent_frame_number + i;
3432 LOGD("inferred urgent frame_number: %d",
3433 urgent_frame_number);
3434 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3435 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3436 } else {
3437 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3438 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3439 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3440 }
3441 }
3442
3443 /* Infer frame number. Batch metadata contains frame number of the
3444 * last frame */
3445 if (frame_number_valid) {
3446 if (i < frameNumDiff) {
3447 frame_number = first_frame_number + i;
3448 LOGD("inferred frame_number: %d", frame_number);
3449 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3450 CAM_INTF_META_FRAME_NUMBER, frame_number);
3451 } else {
3452 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3453 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3454 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3455 }
3456 }
3457
3458 if (last_frame_capture_time) {
3459 //Infer timestamp
3460 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003461 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003463 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003464 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3465 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3466 LOGD("batch capture_time: %lld, capture_time: %lld",
3467 last_frame_capture_time, capture_time);
3468 }
3469 }
3470 pthread_mutex_lock(&mMutex);
3471 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003472 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003473 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3474 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003475 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003476 pthread_mutex_unlock(&mMutex);
3477 }
3478
3479 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003480 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003481 mMetadataChannel->bufDone(metadata_buf);
3482 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003483 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003484 }
3485}
3486
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003487void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3488 camera3_error_msg_code_t errorCode)
3489{
3490 camera3_notify_msg_t notify_msg;
3491 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3492 notify_msg.type = CAMERA3_MSG_ERROR;
3493 notify_msg.message.error.error_code = errorCode;
3494 notify_msg.message.error.error_stream = NULL;
3495 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003496 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003497
3498 return;
3499}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003500
3501/*===========================================================================
3502 * FUNCTION : sendPartialMetadataWithLock
3503 *
3504 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3505 *
3506 * PARAMETERS : @metadata: metadata buffer
3507 * @requestIter: The iterator for the pending capture request for
3508 * which the partial result is being sen
3509 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3510 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003511 * @isJumpstartMetadata: Whether this is a partial metadata for
3512 * jumpstart, i.e. even though it doesn't map to a valid partial
3513 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003514 *
3515 * RETURN :
3516 *
3517 *==========================================================================*/
3518
3519void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3520 metadata_buffer_t *metadata,
3521 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003522 bool lastUrgentMetadataInBatch,
3523 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003524{
3525 camera3_capture_result_t result;
3526 memset(&result, 0, sizeof(camera3_capture_result_t));
3527
3528 requestIter->partial_result_cnt++;
3529
3530 // Extract 3A metadata
3531 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003532 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3533 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003534 // Populate metadata result
3535 result.frame_number = requestIter->frame_number;
3536 result.num_output_buffers = 0;
3537 result.output_buffers = NULL;
3538 result.partial_result = requestIter->partial_result_cnt;
3539
3540 {
3541 Mutex::Autolock l(gHdrPlusClientLock);
3542 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3543 // Notify HDR+ client about the partial metadata.
3544 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3545 result.partial_result == PARTIAL_RESULT_COUNT);
3546 }
3547 }
3548
3549 orchestrateResult(&result);
3550 LOGD("urgent frame_number = %u", result.frame_number);
3551 free_camera_metadata((camera_metadata_t *)result.result);
3552}
3553
Thierry Strudel3d639192016-09-09 11:52:26 -07003554/*===========================================================================
3555 * FUNCTION : handleMetadataWithLock
3556 *
3557 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3558 *
3559 * PARAMETERS : @metadata_buf: metadata buffer
3560 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3561 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003562 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3563 * last urgent metadata in a batch. Always true for non-batch mode
3564 * @lastMetadataInBatch: Boolean to indicate whether this is the
3565 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003566 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3567 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003568 *
3569 * RETURN :
3570 *
3571 *==========================================================================*/
3572void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003573 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003574 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3575 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003576{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003577 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003578 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3579 //during flush do not send metadata from this thread
3580 LOGD("not sending metadata during flush or when mState is error");
3581 if (free_and_bufdone_meta_buf) {
3582 mMetadataChannel->bufDone(metadata_buf);
3583 free(metadata_buf);
3584 }
3585 return;
3586 }
3587
3588 //not in flush
3589 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3590 int32_t frame_number_valid, urgent_frame_number_valid;
3591 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003592 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 nsecs_t currentSysTime;
3594
3595 int32_t *p_frame_number_valid =
3596 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3597 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3598 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003599 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003600 int32_t *p_urgent_frame_number_valid =
3601 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3602 uint32_t *p_urgent_frame_number =
3603 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3604 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3605 metadata) {
3606 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3607 *p_frame_number_valid, *p_frame_number);
3608 }
3609
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003610 camera_metadata_t *resultMetadata = nullptr;
3611
Thierry Strudel3d639192016-09-09 11:52:26 -07003612 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3613 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3614 LOGE("Invalid metadata");
3615 if (free_and_bufdone_meta_buf) {
3616 mMetadataChannel->bufDone(metadata_buf);
3617 free(metadata_buf);
3618 }
3619 goto done_metadata;
3620 }
3621 frame_number_valid = *p_frame_number_valid;
3622 frame_number = *p_frame_number;
3623 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003624 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003625 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3626 urgent_frame_number = *p_urgent_frame_number;
3627 currentSysTime = systemTime(CLOCK_MONOTONIC);
3628
Jason Lee603176d2017-05-31 11:43:27 -07003629 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3630 const int tries = 3;
3631 nsecs_t bestGap, measured;
3632 for (int i = 0; i < tries; ++i) {
3633 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3634 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3635 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3636 const nsecs_t gap = tmono2 - tmono;
3637 if (i == 0 || gap < bestGap) {
3638 bestGap = gap;
3639 measured = tbase - ((tmono + tmono2) >> 1);
3640 }
3641 }
3642 capture_time -= measured;
3643 }
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 // Detect if buffers from any requests are overdue
3646 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003647 int64_t timeout;
3648 {
3649 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3650 // If there is a pending HDR+ request, the following requests may be blocked until the
3651 // HDR+ request is done. So allow a longer timeout.
3652 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3653 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3654 }
3655
3656 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003657 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003658 assert(missed.stream->priv);
3659 if (missed.stream->priv) {
3660 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3661 assert(ch->mStreams[0]);
3662 if (ch->mStreams[0]) {
3663 LOGE("Cancel missing frame = %d, buffer = %p,"
3664 "stream type = %d, stream format = %d",
3665 req.frame_number, missed.buffer,
3666 ch->mStreams[0]->getMyType(), missed.stream->format);
3667 ch->timeoutFrame(req.frame_number);
3668 }
3669 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003670 }
3671 }
3672 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003673 //For the very first metadata callback, regardless of whether it contains a valid
3674 //frame number, send the partial metadata for the jumpstarting requests.
3675 //Note that this has to be done even if the metadata doesn't contain valid
3676 //urgent frame number, because in the case where only 1 request is ever submitted
3677 //to the HAL, there won't be a subsequent valid urgent frame number.
3678 if (mFirstMetadataCallback) {
3679 for (pendingRequestIterator i =
3680 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3681 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003682 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3683 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003684 }
3685 }
3686 mFirstMetadataCallback = false;
3687 }
3688
Thierry Strudel3d639192016-09-09 11:52:26 -07003689 //Partial result on process_capture_result for timestamp
3690 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003691 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003692
3693 //Received an urgent frame number, handle it
3694 //using partial results
3695 for (pendingRequestIterator i =
3696 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3697 LOGD("Iterator Frame = %d urgent frame = %d",
3698 i->frame_number, urgent_frame_number);
3699
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003700 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003701 (i->partial_result_cnt == 0)) {
3702 LOGE("Error: HAL missed urgent metadata for frame number %d",
3703 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003704 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003705 }
3706
3707 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003708 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003709 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3710 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003711 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3712 // Instant AEC settled for this frame.
3713 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3714 mInstantAECSettledFrameNumber = urgent_frame_number;
3715 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003716 break;
3717 }
3718 }
3719 }
3720
3721 if (!frame_number_valid) {
3722 LOGD("Not a valid normal frame number, used as SOF only");
3723 if (free_and_bufdone_meta_buf) {
3724 mMetadataChannel->bufDone(metadata_buf);
3725 free(metadata_buf);
3726 }
3727 goto done_metadata;
3728 }
3729 LOGH("valid frame_number = %u, capture_time = %lld",
3730 frame_number, capture_time);
3731
Emilian Peev4e0fe952017-06-30 12:40:09 -07003732 handleDepthDataLocked(metadata->depth_data, frame_number,
3733 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003734
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003735 // Check whether any stream buffer corresponding to this frame number was dropped.
3736 // If dropped, send ERROR_BUFFER for the corresponding stream.
3737 // Also, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3738 for (auto & pendingRequest : mPendingRequestsList) {
3739 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3740 mInstantAECSettledFrameNumber)) {
3741 camera3_notify_msg_t notify_msg = {};
3742 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003743 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003744 QCamera3ProcessingChannel *channel =
3745 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003746 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 if (p_cam_frame_drop) {
3748 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003749 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003750 // Got the stream ID for drop frame.
3751 dropFrame = true;
3752 break;
3753 }
3754 }
3755 } else {
3756 // This is instant AEC case.
3757 // For instant AEC, drop the stream until AEC is settled.
3758 dropFrame = true;
3759 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003760
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003761 if (dropFrame) {
3762 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3763 if (p_cam_frame_drop) {
3764 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003765 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003767 } else {
3768 // For instant AEC, inform frame drop and frame number
3769 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3770 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 pendingRequest.frame_number, streamID,
3772 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003773 }
3774 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003775 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003776 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003777 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003778 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003779 if (p_cam_frame_drop) {
3780 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003781 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003782 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 } else {
3784 // For instant AEC, inform frame drop and frame number
3785 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3786 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003787 pendingRequest.frame_number, streamID,
3788 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003789 }
3790 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003792 PendingFrameDrop.stream_ID = streamID;
3793 // Add the Frame drop info to mPendingFrameDropList
3794 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003795 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003796 }
3797 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003798 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003799
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003800 for (auto & pendingRequest : mPendingRequestsList) {
3801 // Find the pending request with the frame number.
3802 if (pendingRequest.frame_number == frame_number) {
3803 // Update the sensor timestamp.
3804 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003805
Thierry Strudel3d639192016-09-09 11:52:26 -07003806
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003807 /* Set the timestamp in display metadata so that clients aware of
3808 private_handle, such as VT, can use this unmodified timestamp.
3809 The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003810 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003811
Thierry Strudel3d639192016-09-09 11:52:26 -07003812 // Find channel requiring metadata, meaning internal offline postprocess
3813 // is needed.
3814 //TODO: for now, we don't support two streams requiring metadata at the same time.
3815 // (because we are not making copies, and the metadata buffer is not reference counted.)
3816 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3818 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003819 if (iter->need_metadata) {
3820 internalPproc = true;
3821 QCamera3ProcessingChannel *channel =
3822 (QCamera3ProcessingChannel *)iter->stream->priv;
3823 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003824 if(p_is_metabuf_queued != NULL) {
3825 *p_is_metabuf_queued = true;
3826 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003827 break;
3828 }
3829 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003830 for (auto itr = pendingRequest.internalRequestList.begin();
3831 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003832 if (itr->need_metadata) {
3833 internalPproc = true;
3834 QCamera3ProcessingChannel *channel =
3835 (QCamera3ProcessingChannel *)itr->stream->priv;
3836 channel->queueReprocMetadata(metadata_buf);
3837 break;
3838 }
3839 }
3840
Thierry Strudel54dc9782017-02-15 12:12:10 -08003841 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003842
3843 bool *enableZsl = nullptr;
3844 if (gExposeEnableZslKey) {
3845 enableZsl = &pendingRequest.enableZsl;
3846 }
3847
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003848 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003849 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003850 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003851
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003852 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003853
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003854 if (pendingRequest.blob_request) {
3855 //Dump tuning metadata if enabled and available
3856 char prop[PROPERTY_VALUE_MAX];
3857 memset(prop, 0, sizeof(prop));
3858 property_get("persist.camera.dumpmetadata", prop, "0");
3859 int32_t enabled = atoi(prop);
3860 if (enabled && metadata->is_tuning_params_valid) {
3861 dumpMetadataToFile(metadata->tuning_params,
3862 mMetaFrameCount,
3863 enabled,
3864 "Snapshot",
3865 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003866 }
3867 }
3868
3869 if (!internalPproc) {
3870 LOGD("couldn't find need_metadata for this metadata");
3871 // Return metadata buffer
3872 if (free_and_bufdone_meta_buf) {
3873 mMetadataChannel->bufDone(metadata_buf);
3874 free(metadata_buf);
3875 }
3876 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003877
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003878 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003879 }
3880 }
3881
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003882 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3883
3884 // Try to send out capture result metadata.
3885 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003886 return;
3887
Thierry Strudel3d639192016-09-09 11:52:26 -07003888done_metadata:
3889 for (pendingRequestIterator i = mPendingRequestsList.begin();
3890 i != mPendingRequestsList.end() ;i++) {
3891 i->pipeline_depth++;
3892 }
3893 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3894 unblockRequestIfNecessary();
3895}
3896
3897/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003898 * FUNCTION   : handleDepthDataLocked
3899 *
3900 * DESCRIPTION: Handles incoming depth data
3901 *
3902 * PARAMETERS : @depthData : Depth data
3903 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003904 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003905 *
3906 * RETURN :
3907 *
3908 *==========================================================================*/
3909void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003910 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003911 uint32_t currentFrameNumber;
3912 buffer_handle_t *depthBuffer;
3913
3914 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003915 return;
3916 }
3917
3918 camera3_stream_buffer_t resultBuffer =
3919 {.acquire_fence = -1,
3920 .release_fence = -1,
3921 .status = CAMERA3_BUFFER_STATUS_OK,
3922 .buffer = nullptr,
3923 .stream = mDepthChannel->getStream()};
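    // Drain the depth channel's queued buffers in frame-number order: older
    // buffers with no matching depth data are returned as errors, the buffer
    // matching this frame number is populated (or marked invalid) based on
    // 'valid', and newer buffers stay queued for later depth data.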
Emilian Peev7650c122017-01-19 08:24:33 -08003924 do {
3925 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3926 if (nullptr == depthBuffer) {
3927 break;
3928 }
3929
Emilian Peev7650c122017-01-19 08:24:33 -08003930 resultBuffer.buffer = depthBuffer;
3931 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003932 if (valid) {
3933 int32_t rc = mDepthChannel->populateDepthData(depthData,
3934 frameNumber);
3935 if (NO_ERROR != rc) {
3936 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3937 } else {
3938 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3939 }
Emilian Peev7650c122017-01-19 08:24:33 -08003940 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003941 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003942 }
3943 } else if (currentFrameNumber > frameNumber) {
3944 break;
3945 } else {
3946 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3947 {{currentFrameNumber, mDepthChannel->getStream(),
3948 CAMERA3_MSG_ERROR_BUFFER}}};
3949 orchestrateNotify(&notify_msg);
3950
3951 LOGE("Depth buffer for frame number: %d is missing "
3952 "returning back!", currentFrameNumber);
3953 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3954 }
3955 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003956 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003957 } while (currentFrameNumber < frameNumber);
3958}
3959
3960/*===========================================================================
3961 * FUNCTION : notifyErrorFoPendingDepthData
3962 *
3963 * DESCRIPTION: Returns error for any pending depth buffers
3964 *
3965 * PARAMETERS : depthCh - depth channel that needs to get flushed
3966 *
3967 * RETURN :
3968 *
3969 *==========================================================================*/
3970void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3971 QCamera3DepthChannel *depthCh) {
3972 uint32_t currentFrameNumber;
3973 buffer_handle_t *depthBuffer;
3974
3975 if (nullptr == depthCh) {
3976 return;
3977 }
3978
3979 camera3_notify_msg_t notify_msg =
3980 {.type = CAMERA3_MSG_ERROR,
3981 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3982 camera3_stream_buffer_t resultBuffer =
3983 {.acquire_fence = -1,
3984 .release_fence = -1,
3985 .buffer = nullptr,
3986 .stream = depthCh->getStream(),
3987 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003988
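    // Flush every remaining depth buffer: unmap it, notify the framework of a
    // buffer error for that frame number, and return the buffer with an error
    // status through the output buffer dispatcher.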
3989 while (nullptr !=
3990 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3991 depthCh->unmapBuffer(currentFrameNumber);
3992
3993 notify_msg.message.error.frame_number = currentFrameNumber;
3994 orchestrateNotify(&notify_msg);
3995
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003996 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003997 };
3998}
3999
4000/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004001 * FUNCTION : hdrPlusPerfLock
4002 *
4003 * DESCRIPTION: perf lock for HDR+ using custom intent
4004 *
4005 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4006 *
4007 * RETURN : None
4008 *
4009 *==========================================================================*/
4010void QCamera3HardwareInterface::hdrPlusPerfLock(
4011 mm_camera_super_buf_t *metadata_buf)
4012{
4013 if (NULL == metadata_buf) {
4014 LOGE("metadata_buf is NULL");
4015 return;
4016 }
4017 metadata_buffer_t *metadata =
4018 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4019 int32_t *p_frame_number_valid =
4020 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4021 uint32_t *p_frame_number =
4022 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4023
4024 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4025 LOGE("%s: Invalid metadata", __func__);
4026 return;
4027 }
4028
Wei Wang01385482017-08-03 10:49:34 -07004029 //acquire perf lock for 2 secs after the last HDR frame is captured
4030 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004031 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4032 if ((p_frame_number != NULL) &&
4033 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004034 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004035 }
4036 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004037}
4038
4039/*===========================================================================
4040 * FUNCTION : handleInputBufferWithLock
4041 *
4042 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4043 *
4044 * PARAMETERS : @frame_number: frame number of the input buffer
4045 *
4046 * RETURN :
4047 *
4048 *==========================================================================*/
4049void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4050{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004051 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004052 pendingRequestIterator i = mPendingRequestsList.begin();
4053 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4054 i++;
4055 }
4056 if (i != mPendingRequestsList.end() && i->input_buffer) {
4057 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004058 CameraMetadata settings;
4059 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4060 if(i->settings) {
4061 settings = i->settings;
4062 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4063 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004064 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004065 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004066 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004067 } else {
4068 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004069 }
4070
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004071 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4072 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4073 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004074
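        // Assemble the reprocess result: the input settings double as the
        // result metadata, and the original input buffer is returned with it.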
4075 camera3_capture_result result;
4076 memset(&result, 0, sizeof(camera3_capture_result));
4077 result.frame_number = frame_number;
4078 result.result = i->settings;
4079 result.input_buffer = i->input_buffer;
4080 result.partial_result = PARTIAL_RESULT_COUNT;
4081
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004082 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 LOGD("Input request metadata and input buffer frame_number = %u",
4084 i->frame_number);
4085 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004086
4087 // Dispatch result metadata that may be just unblocked by this reprocess result.
4088 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004089 } else {
4090 LOGE("Could not find input request for frame number %d", frame_number);
4091 }
4092}
4093
4094/*===========================================================================
4095 * FUNCTION : handleBufferWithLock
4096 *
4097 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4098 *
4099 * PARAMETERS : @buffer: image buffer for the callback
4100 * @frame_number: frame number of the image buffer
4101 *
4102 * RETURN :
4103 *
4104 *==========================================================================*/
4105void QCamera3HardwareInterface::handleBufferWithLock(
4106 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4107{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004108 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004109
4110 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4111 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4112 }
4113
Thierry Strudel3d639192016-09-09 11:52:26 -07004114 /* Nothing to be done during error state */
4115 if ((ERROR == mState) || (DEINIT == mState)) {
4116 return;
4117 }
4118 if (mFlushPerf) {
4119 handleBuffersDuringFlushLock(buffer);
4120 return;
4121 }
4122 //not in flush
4123     // If the frame number doesn't exist in the pending request list,
4124     // directly send the buffer to the framework, and update the pending buffers map.
4125     // Otherwise, book-keep the buffer.
4126 pendingRequestIterator i = mPendingRequestsList.begin();
4127 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4128 i++;
4129 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004130
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004131 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004132 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004133 // For a reprocessing request, try to send out result metadata.
4134 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004135 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004136 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004137
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004138 // Check if this frame was dropped.
4139 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4140 m != mPendingFrameDropList.end(); m++) {
4141 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4142 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4143 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4144 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4145 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4146 frame_number, streamID);
4147 m = mPendingFrameDropList.erase(m);
4148 break;
4149 }
4150 }
4151
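    // Fold in any error status already recorded for this buffer before it is
    // removed from the pending-buffers map and handed to the output dispatcher.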
4152 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4153 LOGH("result frame_number = %d, buffer = %p",
4154 frame_number, buffer->buffer);
4155
4156 mPendingBuffersMap.removeBuf(buffer->buffer);
4157 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4158
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004159 if (mPreviewStarted == false) {
4160 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4161 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004162 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4163
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004164 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4165 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4166 mPreviewStarted = true;
4167
4168 // Set power hint for preview
4169 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4170 }
4171 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004172}
4173
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004175 const camera_metadata_t *resultMetadata)
4176{
4177 // Find the pending request for this result metadata.
4178 auto requestIter = mPendingRequestsList.begin();
4179 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4180 requestIter++;
4181 }
4182
4183 if (requestIter == mPendingRequestsList.end()) {
4184 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4185 return;
4186 }
4187
4188 // Update the result metadata
4189 requestIter->resultMetadata = resultMetadata;
4190
4191 // Check what type of request this is.
4192 bool liveRequest = false;
4193 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004194 // HDR+ request doesn't have partial results.
4195 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004196 } else if (requestIter->input_buffer != nullptr) {
4197 // Reprocessing request result is the same as settings.
4198 requestIter->resultMetadata = requestIter->settings;
4199 // Reprocessing request doesn't have partial results.
4200 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4201 } else {
4202 liveRequest = true;
4203 requestIter->partial_result_cnt++;
4204 mPendingLiveRequest--;
4205
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004206 {
4207 Mutex::Autolock l(gHdrPlusClientLock);
4208 // For a live request, send the metadata to HDR+ client.
4209 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4210 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4211 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4212 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004213 }
4214 }
4215
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004216 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4217}
4218
4219void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4220 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004221 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4222 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004223 bool readyToSend = true;
4224
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004225 // Iterate through the pending requests to send out result metadata that are ready. Also if
4226 // this result metadata belongs to a live request, notify errors for previous live requests
4227 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004228 auto iter = mPendingRequestsList.begin();
4229 while (iter != mPendingRequestsList.end()) {
4230 // Check if current pending request is ready. If it's not ready, the following pending
4231 // requests are also not ready.
4232 if (readyToSend && iter->resultMetadata == nullptr) {
4233 readyToSend = false;
4234 }
4235
4236 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4237
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004238 camera3_capture_result_t result = {};
4239 result.frame_number = iter->frame_number;
4240 result.result = iter->resultMetadata;
4241 result.partial_result = iter->partial_result_cnt;
4242
4243 // If this pending buffer has result metadata, we may be able to send out shutter callback
4244 // and result metadata.
4245 if (iter->resultMetadata != nullptr) {
4246 if (!readyToSend) {
4247 // If any of the previous pending request is not ready, this pending request is
4248 // also not ready to send in order to keep shutter callbacks and result metadata
4249 // in order.
4250 iter++;
4251 continue;
4252 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004253 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004254 // If the result metadata belongs to a live request, notify errors for previous pending
4255 // live requests.
4256 mPendingLiveRequest--;
4257
4258 CameraMetadata dummyMetadata;
4259 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4260 result.result = dummyMetadata.release();
4261
4262 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004263
4264             // partial_result should be PARTIAL_RESULT_COUNT in case of
4265             // ERROR_RESULT.
4266 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4267 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 } else {
4269 iter++;
4270 continue;
4271 }
4272
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004273 result.output_buffers = nullptr;
4274 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004275 orchestrateResult(&result);
4276
4277 // For reprocessing, result metadata is the same as settings so do not free it here to
4278 // avoid double free.
4279 if (result.result != iter->settings) {
4280 free_camera_metadata((camera_metadata_t *)result.result);
4281 }
4282 iter->resultMetadata = nullptr;
4283 iter = erasePendingRequest(iter);
4284 }
4285
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004286 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004287 for (auto &iter : mPendingRequestsList) {
4288 // Increment pipeline depth for the following pending requests.
4289 if (iter.frame_number > frameNumber) {
4290 iter.pipeline_depth++;
4291 }
4292 }
4293 }
4294
4295 unblockRequestIfNecessary();
4296}
4297
Thierry Strudel3d639192016-09-09 11:52:26 -07004298/*===========================================================================
4299 * FUNCTION : unblockRequestIfNecessary
4300 *
4301 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4302 * that mMutex is held when this function is called.
4303 *
4304 * PARAMETERS :
4305 *
4306 * RETURN :
4307 *
4308 *==========================================================================*/
4309void QCamera3HardwareInterface::unblockRequestIfNecessary()
4310{
4311 // Unblock process_capture_request
4312 pthread_cond_signal(&mRequestCond);
4313}
4314
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004315/*===========================================================================
4316 * FUNCTION : isHdrSnapshotRequest
4317 *
4318 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4319 *
4320 * PARAMETERS : camera3 request structure
4321 *
4322 * RETURN : boolean decision variable
4323 *
4324 *==========================================================================*/
4325bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4326{
4327 if (request == NULL) {
4328 LOGE("Invalid request handle");
4329 assert(0);
4330 return false;
4331 }
4332
4333 if (!mForceHdrSnapshot) {
4334 CameraMetadata frame_settings;
4335 frame_settings = request->settings;
4336
4337 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4338 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4339 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4340 return false;
4341 }
4342 } else {
4343 return false;
4344 }
4345
4346 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4347 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4348 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4349 return false;
4350 }
4351 } else {
4352 return false;
4353 }
4354 }
4355
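    // Even when HDR scene mode is selected (or forced via property), treat the
    // request as an HDR snapshot only if it contains a BLOB output stream.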
4356 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4357 if (request->output_buffers[i].stream->format
4358 == HAL_PIXEL_FORMAT_BLOB) {
4359 return true;
4360 }
4361 }
4362
4363 return false;
4364}

4365/*===========================================================================
4366 * FUNCTION : orchestrateRequest
4367 *
4368 * DESCRIPTION: Orchestrates a capture request from camera service
4369 *
4370 * PARAMETERS :
4371 * @request : request from framework to process
4372 *
4373 * RETURN : Error status codes
4374 *
4375 *==========================================================================*/
4376int32_t QCamera3HardwareInterface::orchestrateRequest(
4377 camera3_capture_request_t *request)
4378{
4379
4380 uint32_t originalFrameNumber = request->frame_number;
4381 uint32_t originalOutputCount = request->num_output_buffers;
4382 const camera_metadata_t *original_settings = request->settings;
4383 List<InternalRequest> internallyRequestedStreams;
4384 List<InternalRequest> emptyInternalList;
4385
4386 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4387 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4388 uint32_t internalFrameNumber;
4389 CameraMetadata modified_meta;
4390
4391
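        /* The HDR snapshot is expanded into a bracketed sequence keyed by
         * internal frame numbers: a metering/settling capture plus the
         * framework-visible capture at the first exposure compensation step,
         * then settling and metadata-carrying captures at 0 EV, and finally
         * settling and metadata-carrying captures at the 2x EV step, after
         * which the original settings pointer is restored. */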
4392 /* Add Blob channel to list of internally requested streams */
4393 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4394 if (request->output_buffers[i].stream->format
4395 == HAL_PIXEL_FORMAT_BLOB) {
4396 InternalRequest streamRequested;
4397 streamRequested.meteringOnly = 1;
4398 streamRequested.need_metadata = 0;
4399 streamRequested.stream = request->output_buffers[i].stream;
4400 internallyRequestedStreams.push_back(streamRequested);
4401 }
4402 }
4403 request->num_output_buffers = 0;
4404 auto itr = internallyRequestedStreams.begin();
4405
4406 /* Modify setting to set compensation */
4407 modified_meta = request->settings;
4408 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4409 uint8_t aeLock = 1;
4410 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4411 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4412 camera_metadata_t *modified_settings = modified_meta.release();
4413 request->settings = modified_settings;
4414
4415 /* Capture Settling & -2x frame */
4416 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4417 request->frame_number = internalFrameNumber;
4418 processCaptureRequest(request, internallyRequestedStreams);
4419
4420 request->num_output_buffers = originalOutputCount;
4421 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4422 request->frame_number = internalFrameNumber;
4423 processCaptureRequest(request, emptyInternalList);
4424 request->num_output_buffers = 0;
4425
4426 modified_meta = modified_settings;
4427 expCompensation = 0;
4428 aeLock = 1;
4429 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4430 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4431 modified_settings = modified_meta.release();
4432 request->settings = modified_settings;
4433
4434 /* Capture Settling & 0X frame */
4435
4436 itr = internallyRequestedStreams.begin();
4437 if (itr == internallyRequestedStreams.end()) {
4438 LOGE("Error Internally Requested Stream list is empty");
4439 assert(0);
4440 } else {
4441 itr->need_metadata = 0;
4442 itr->meteringOnly = 1;
4443 }
4444
4445 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4446 request->frame_number = internalFrameNumber;
4447 processCaptureRequest(request, internallyRequestedStreams);
4448
4449 itr = internallyRequestedStreams.begin();
4450 if (itr == internallyRequestedStreams.end()) {
4451 ALOGE("Error Internally Requested Stream list is empty");
4452 assert(0);
4453 } else {
4454 itr->need_metadata = 1;
4455 itr->meteringOnly = 0;
4456 }
4457
4458 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4459 request->frame_number = internalFrameNumber;
4460 processCaptureRequest(request, internallyRequestedStreams);
4461
4462 /* Capture 2X frame*/
4463 modified_meta = modified_settings;
4464 expCompensation = GB_HDR_2X_STEP_EV;
4465 aeLock = 1;
4466 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4467 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4468 modified_settings = modified_meta.release();
4469 request->settings = modified_settings;
4470
4471 itr = internallyRequestedStreams.begin();
4472 if (itr == internallyRequestedStreams.end()) {
4473 ALOGE("Error Internally Requested Stream list is empty");
4474 assert(0);
4475 } else {
4476 itr->need_metadata = 0;
4477 itr->meteringOnly = 1;
4478 }
4479 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4480 request->frame_number = internalFrameNumber;
4481 processCaptureRequest(request, internallyRequestedStreams);
4482
4483 itr = internallyRequestedStreams.begin();
4484 if (itr == internallyRequestedStreams.end()) {
4485 ALOGE("Error Internally Requested Stream list is empty");
4486 assert(0);
4487 } else {
4488 itr->need_metadata = 1;
4489 itr->meteringOnly = 0;
4490 }
4491
4492 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4493 request->frame_number = internalFrameNumber;
4494 processCaptureRequest(request, internallyRequestedStreams);
4495
4496
4497 /* Capture 2X on original streaming config*/
4498 internallyRequestedStreams.clear();
4499
4500 /* Restore original settings pointer */
4501 request->settings = original_settings;
4502 } else {
4503 uint32_t internalFrameNumber;
4504 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4505 request->frame_number = internalFrameNumber;
4506 return processCaptureRequest(request, internallyRequestedStreams);
4507 }
4508
4509 return NO_ERROR;
4510}
4511
4512/*===========================================================================
4513 * FUNCTION : orchestrateResult
4514 *
4515 * DESCRIPTION: Orchestrates a capture result to camera service
4516 *
4517 * PARAMETERS :
4518 *   @result : capture result to forward to the camera service
4519 *
4520 * RETURN :
4521 *
4522 *==========================================================================*/
4523void QCamera3HardwareInterface::orchestrateResult(
4524 camera3_capture_result_t *result)
4525{
4526 uint32_t frameworkFrameNumber;
4527 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4528 frameworkFrameNumber);
4529 if (rc != NO_ERROR) {
4530 LOGE("Cannot find translated frameworkFrameNumber");
4531 assert(0);
4532 } else {
4533 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004534 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004535 } else {
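            // Rewrite ANDROID_SYNC_FRAME_NUMBER in the result metadata so it
            // refers to the framework's frame number rather than the HAL's
            // internal one before forwarding the result.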
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004536 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004537 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4538 camera_metadata_entry_t entry;
4539 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4540 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004541 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004542 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4543 if (ret != OK)
4544 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004545 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004546 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004547 result->frame_number = frameworkFrameNumber;
4548 mCallbackOps->process_capture_result(mCallbackOps, result);
4549 }
4550 }
4551}
4552
4553/*===========================================================================
4554 * FUNCTION : orchestrateNotify
4555 *
4556 * DESCRIPTION: Orchestrates a notify to camera service
4557 *
4558 * PARAMETERS :
4559 *   @notify_msg : notify message to forward to the camera service
4560 *
4561 * RETURN :
4562 *
4563 *==========================================================================*/
4564void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4565{
4566 uint32_t frameworkFrameNumber;
4567 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004568 int32_t rc = NO_ERROR;
4569
4570 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004571 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004572
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004573 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004574 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4575 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4576 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004577 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004578 LOGE("Cannot find translated frameworkFrameNumber");
4579 assert(0);
4580 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004581 }
4582 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004583
4584 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4585 LOGD("Internal Request drop the notifyCb");
4586 } else {
4587 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4588 mCallbackOps->notify(mCallbackOps, notify_msg);
4589 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004590}
4591
4592/*===========================================================================
4593 * FUNCTION : FrameNumberRegistry
4594 *
4595 * DESCRIPTION: Constructor
4596 *
4597 * PARAMETERS :
4598 *
4599 * RETURN :
4600 *
4601 *==========================================================================*/
4602FrameNumberRegistry::FrameNumberRegistry()
4603{
4604 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4605}
4606
4607/*===========================================================================
4608 * FUNCTION : ~FrameNumberRegistry
4609 *
4610 * DESCRIPTION: Destructor
4611 *
4612 * PARAMETERS :
4613 *
4614 * RETURN :
4615 *
4616 *==========================================================================*/
4617FrameNumberRegistry::~FrameNumberRegistry()
4618{
4619}
4620
4621/*===========================================================================
4622 * FUNCTION : PurgeOldEntriesLocked
4623 *
4624 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4625 *
4626 * PARAMETERS :
4627 *
4628 * RETURN : NONE
4629 *
4630 *==========================================================================*/
4631void FrameNumberRegistry::purgeOldEntriesLocked()
4632{
4633 while (_register.begin() != _register.end()) {
4634 auto itr = _register.begin();
4635 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4636 _register.erase(itr);
4637 } else {
4638 return;
4639 }
4640 }
4641}
4642
4643/*===========================================================================
4644 * FUNCTION : allocStoreInternalFrameNumber
4645 *
4646 * DESCRIPTION: Method to note down a framework request and associate a new
4647 * internal request number against it
4648 *
4649 * PARAMETERS :
4650 *   @frameworkFrameNumber: Identifier given by the framework
4651 *   @internalFrameNumber : Output parameter that receives the newly generated
4652 *                          internal frame number
4653 *
4654 * RETURN : Error code
4655 *
4656 *==========================================================================*/
4657int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4658 uint32_t &internalFrameNumber)
4659{
4660 Mutex::Autolock lock(mRegistryLock);
4661 internalFrameNumber = _nextFreeInternalNumber++;
4662 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4663 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4664 purgeOldEntriesLocked();
4665 return NO_ERROR;
4666}
4667
4668/*===========================================================================
4669 * FUNCTION : generateStoreInternalFrameNumber
4670 *
4671 * DESCRIPTION: Method to generate a new internal request number independent
4672 *              of any association with a framework request
4673 *
4674 * PARAMETERS :
4675 *   @internalFrameNumber: Output parameter that receives the newly generated
4676 *                         internal frame number
4677 *
4678 * RETURN : Error code
4679 *
4680 *==========================================================================*/
4681int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4682{
4683 Mutex::Autolock lock(mRegistryLock);
4684 internalFrameNumber = _nextFreeInternalNumber++;
4685 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4686 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4687 purgeOldEntriesLocked();
4688 return NO_ERROR;
4689}
4690
4691/*===========================================================================
4692 * FUNCTION : getFrameworkFrameNumber
4693 *
4694 * DESCRIPTION: Method to query the framework frame number given an internal one
4695 *
4696 * PARAMETERS :
4697 *   @internalFrameNumber : Internal reference
4698 *   @frameworkFrameNumber: Output parameter holding the framework frame number
4699 *
4700 * RETURN : Error code
4701 *
4702 *==========================================================================*/
4703int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4704 uint32_t &frameworkFrameNumber)
4705{
4706 Mutex::Autolock lock(mRegistryLock);
4707 auto itr = _register.find(internalFrameNumber);
4708 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004709 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004710 return -ENOENT;
4711 }
4712
4713 frameworkFrameNumber = itr->second;
4714 purgeOldEntriesLocked();
4715 return NO_ERROR;
4716}
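
/* Illustrative flow: orchestrateRequest() calls allocStoreInternalFrameNumber()
 * for the framework-visible capture and generateStoreInternalFrameNumber() for
 * purely internal captures; orchestrateResult() and orchestrateNotify() later
 * call getFrameworkFrameNumber() to translate back, and entries older than
 * FRAME_REGISTER_LRU_SIZE are purged on each access. */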
Thierry Strudel3d639192016-09-09 11:52:26 -07004717
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004718status_t QCamera3HardwareInterface::fillPbStreamConfig(
4719 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4720 QCamera3Channel *channel, uint32_t streamIndex) {
4721 if (config == nullptr) {
4722 LOGE("%s: config is null", __FUNCTION__);
4723 return BAD_VALUE;
4724 }
4725
4726 if (channel == nullptr) {
4727 LOGE("%s: channel is null", __FUNCTION__);
4728 return BAD_VALUE;
4729 }
4730
4731 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4732 if (stream == nullptr) {
4733 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4734 return NAME_NOT_FOUND;
4735 }
4736
4737 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4738 if (streamInfo == nullptr) {
4739 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4740 return NAME_NOT_FOUND;
4741 }
4742
4743 config->id = pbStreamId;
4744 config->image.width = streamInfo->dim.width;
4745 config->image.height = streamInfo->dim.height;
4746 config->image.padding = 0;
4747 config->image.format = pbStreamFormat;
4748
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004749 uint32_t totalPlaneSize = 0;
4750
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004751 // Fill plane information.
4752 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4753 pbcamera::PlaneConfiguration plane;
4754 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4755 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4756 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004757
4758 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004759 }
4760
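    // Padding is whatever remains of the total frame length after the per-plane
    // data (stride x scanline, summed over planes) is accounted for.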
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004761 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004762 return OK;
4763}
4764
Thierry Strudel3d639192016-09-09 11:52:26 -07004765/*===========================================================================
4766 * FUNCTION : processCaptureRequest
4767 *
4768 * DESCRIPTION: process a capture request from camera service
4769 *
4770 * PARAMETERS :
4771 * @request : request from framework to process
4772 *
4773 * RETURN :
4774 *
4775 *==========================================================================*/
4776int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004777 camera3_capture_request_t *request,
4778 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004779{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004780 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004781 int rc = NO_ERROR;
4782 int32_t request_id;
4783 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004784 bool isVidBufRequested = false;
4785 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004786 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004787
4788 pthread_mutex_lock(&mMutex);
4789
4790 // Validate current state
4791 switch (mState) {
4792 case CONFIGURED:
4793 case STARTED:
4794 /* valid state */
4795 break;
4796
4797 case ERROR:
4798 pthread_mutex_unlock(&mMutex);
4799 handleCameraDeviceError();
4800 return -ENODEV;
4801
4802 default:
4803 LOGE("Invalid state %d", mState);
4804 pthread_mutex_unlock(&mMutex);
4805 return -ENODEV;
4806 }
4807
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004808 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004809 if (rc != NO_ERROR) {
4810 LOGE("incoming request is not valid");
4811 pthread_mutex_unlock(&mMutex);
4812 return rc;
4813 }
4814
4815 meta = request->settings;
4816
4817 // For first capture request, send capture intent, and
4818 // stream on all streams
4819 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004820 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004821 // send an unconfigure to the backend so that the isp
4822 // resources are deallocated
4823 if (!mFirstConfiguration) {
4824 cam_stream_size_info_t stream_config_info;
4825 int32_t hal_version = CAM_HAL_V3;
4826 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4827 stream_config_info.buffer_info.min_buffers =
4828 MIN_INFLIGHT_REQUESTS;
4829 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004830 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004831 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 clear_metadata_buffer(mParameters);
4833 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4834 CAM_INTF_PARM_HAL_VERSION, hal_version);
4835 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4836 CAM_INTF_META_STREAM_INFO, stream_config_info);
4837 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4838 mParameters);
4839 if (rc < 0) {
4840 LOGE("set_parms for unconfigure failed");
4841 pthread_mutex_unlock(&mMutex);
4842 return rc;
4843 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004844
Thierry Strudel3d639192016-09-09 11:52:26 -07004845 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004846 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004847 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004848 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004850 property_get("persist.camera.is_type", is_type_value, "4");
4851 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4852 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4853 property_get("persist.camera.is_type_preview", is_type_value, "4");
4854 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4855 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004856
4857 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4858 int32_t hal_version = CAM_HAL_V3;
4859 uint8_t captureIntent =
4860 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4861 mCaptureIntent = captureIntent;
4862 clear_metadata_buffer(mParameters);
4863 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4864 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4865 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004866 if (mFirstConfiguration) {
4867 // configure instant AEC
4868 // Instant AEC is a session based parameter and it is needed only
4869 // once per complete session after open camera.
4870 // i.e. This is set only once for the first capture request, after open camera.
4871 setInstantAEC(meta);
4872 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004873 uint8_t fwkVideoStabMode=0;
4874 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4875 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4876 }
4877
Xue Tuecac74e2017-04-17 13:58:15 -07004878        // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4879 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004880 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 int32_t vsMode;
4882 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4883 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4884 rc = BAD_VALUE;
4885 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004886 LOGD("setEis %d", setEis);
4887 bool eis3Supported = false;
4888 size_t count = IS_TYPE_MAX;
4889 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4890 for (size_t i = 0; i < count; i++) {
4891 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4892 eis3Supported = true;
4893 break;
4894 }
4895 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004896
4897         //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004898         //it could be either EIS 2.0 or EIS 3.0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004899 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4900 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004901 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4902 is_type = isTypePreview;
4903 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4904 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4905 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004906 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004907 } else {
4908 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004910 } else {
4911 is_type = IS_TYPE_NONE;
4912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004914 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004915 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4916 }
4917 }
4918
4919 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4920 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4921
Thierry Strudel54dc9782017-02-15 12:12:10 -08004922 //Disable tintless only if the property is set to 0
4923 memset(prop, 0, sizeof(prop));
4924 property_get("persist.camera.tintless.enable", prop, "1");
4925 int32_t tintless_value = atoi(prop);
4926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4928 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004929
Thierry Strudel3d639192016-09-09 11:52:26 -07004930 //Disable CDS for HFR mode or if DIS/EIS is on.
4931 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4932 //after every configure_stream
4933 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4934 (m_bIsVideo)) {
4935 int32_t cds = CAM_CDS_MODE_OFF;
4936 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4937 CAM_INTF_PARM_CDS_MODE, cds))
4938 LOGE("Failed to disable CDS for HFR mode");
4939
4940 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004941
4942 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4943 uint8_t* use_av_timer = NULL;
4944
4945 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004946 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004947 use_av_timer = &m_debug_avtimer;
4948 }
4949 else{
4950 use_av_timer =
4951 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004952 if (use_av_timer) {
4953 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955 }
4956
4957 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4958 rc = BAD_VALUE;
4959 }
4960 }
4961
Thierry Strudel3d639192016-09-09 11:52:26 -07004962 setMobicat();
4963
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004964 uint8_t nrMode = 0;
4965 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4966 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4967 }
4968
Thierry Strudel3d639192016-09-09 11:52:26 -07004969 /* Set fps and hfr mode while sending meta stream info so that sensor
4970 * can configure appropriate streaming mode */
4971 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004972 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4973 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004974 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4975 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004976 if (rc == NO_ERROR) {
4977 int32_t max_fps =
4978 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004979 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4981 }
4982 /* For HFR, more buffers are dequeued upfront to improve the performance */
4983 if (mBatchSize) {
4984 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4985 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4986 }
4987 }
4988 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004989 LOGE("setHalFpsRange failed");
4990 }
4991 }
4992 if (meta.exists(ANDROID_CONTROL_MODE)) {
4993 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4994 rc = extractSceneMode(meta, metaMode, mParameters);
4995 if (rc != NO_ERROR) {
4996 LOGE("extractSceneMode failed");
4997 }
4998 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004999 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005000
Thierry Strudel04e026f2016-10-10 11:27:36 -07005001 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5002 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5003 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5004 rc = setVideoHdrMode(mParameters, vhdr);
5005 if (rc != NO_ERROR) {
5006 LOGE("setVideoHDR is failed");
5007 }
5008 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005009
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005010 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005011 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005012 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005013 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5015 sensorModeFullFov)) {
5016 rc = BAD_VALUE;
5017 }
5018 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005019 //TODO: validate the arguments, HSV scenemode should have only the
5020 //advertised fps ranges
5021
5022         /*set the capture intent, hal version, tintless, stream info,
5023          *and DIS enable parameters to the backend*/
5024 LOGD("set_parms META_STREAM_INFO " );
5025 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005026 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5027 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005028 mStreamConfigInfo.type[i],
5029 mStreamConfigInfo.stream_sizes[i].width,
5030 mStreamConfigInfo.stream_sizes[i].height,
5031 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032 mStreamConfigInfo.format[i],
5033 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005034 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005035
Thierry Strudel3d639192016-09-09 11:52:26 -07005036 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5037 mParameters);
5038 if (rc < 0) {
5039 LOGE("set_parms failed for hal version, stream info");
5040 }
5041
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005042 cam_sensor_mode_info_t sensorModeInfo = {};
5043 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005044 if (rc != NO_ERROR) {
5045 LOGE("Failed to get sensor output size");
5046 pthread_mutex_unlock(&mMutex);
5047 goto error_exit;
5048 }
5049
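        // Map the full active-array dimensions to the active-array dimensions
        // reported for the selected sensor mode, so crop regions can be
        // translated between the two coordinate spaces.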
5050 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5051 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005052 sensorModeInfo.active_array_size.width,
5053 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005054
5055 /* Set batchmode before initializing channel. Since registerBuffer
5056 * internally initializes some of the channels, better set batchmode
5057 * even before first register buffer */
5058 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5059 it != mStreamInfo.end(); it++) {
5060 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5061 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5062 && mBatchSize) {
5063 rc = channel->setBatchSize(mBatchSize);
5064 //Disable per frame map unmap for HFR/batchmode case
5065 rc |= channel->setPerFrameMapUnmap(false);
5066 if (NO_ERROR != rc) {
5067 LOGE("Channel init failed %d", rc);
5068 pthread_mutex_unlock(&mMutex);
5069 goto error_exit;
5070 }
5071 }
5072 }
5073
5074 //First initialize all streams
5075 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5076 it != mStreamInfo.end(); it++) {
5077 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005078
5079 /* Initial value of NR mode is needed before stream on */
5080 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005081 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5082 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005083 setEis) {
5084 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5085 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5086 is_type = mStreamConfigInfo.is_type[i];
5087 break;
5088 }
5089 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005090 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005091 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005092 rc = channel->initialize(IS_TYPE_NONE);
5093 }
5094 if (NO_ERROR != rc) {
5095 LOGE("Channel initialization failed %d", rc);
5096 pthread_mutex_unlock(&mMutex);
5097 goto error_exit;
5098 }
5099 }
5100
5101 if (mRawDumpChannel) {
5102 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5103 if (rc != NO_ERROR) {
5104 LOGE("Error: Raw Dump Channel init failed");
5105 pthread_mutex_unlock(&mMutex);
5106 goto error_exit;
5107 }
5108 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005109 if (mHdrPlusRawSrcChannel) {
5110 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5111 if (rc != NO_ERROR) {
5112 LOGE("Error: HDR+ RAW Source Channel init failed");
5113 pthread_mutex_unlock(&mMutex);
5114 goto error_exit;
5115 }
5116 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005117 if (mSupportChannel) {
5118 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5119 if (rc < 0) {
5120 LOGE("Support channel initialization failed");
5121 pthread_mutex_unlock(&mMutex);
5122 goto error_exit;
5123 }
5124 }
5125 if (mAnalysisChannel) {
5126 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5127 if (rc < 0) {
5128 LOGE("Analysis channel initialization failed");
5129 pthread_mutex_unlock(&mMutex);
5130 goto error_exit;
5131 }
5132 }
5133 if (mDummyBatchChannel) {
5134 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5135 if (rc < 0) {
5136 LOGE("mDummyBatchChannel setBatchSize failed");
5137 pthread_mutex_unlock(&mMutex);
5138 goto error_exit;
5139 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005140 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005141 if (rc < 0) {
5142 LOGE("mDummyBatchChannel initialization failed");
5143 pthread_mutex_unlock(&mMutex);
5144 goto error_exit;
5145 }
5146 }
5147
5148 // Set bundle info
5149 rc = setBundleInfo();
5150 if (rc < 0) {
5151 LOGE("setBundleInfo failed %d", rc);
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155
5156 //update settings from app here
5157 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5158 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5159 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5160 }
5161 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5162 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5163 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5164 }
5165 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5166 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5167 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5168
5169 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5170 (mLinkedCameraId != mCameraId) ) {
5171 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5172 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005173 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 goto error_exit;
5175 }
5176 }
5177
5178 // add bundle related cameras
5179 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5180 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005181 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5182 &m_pDualCamCmdPtr->bundle_info;
5183 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005184 if (mIsDeviceLinked)
5185 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5186 else
5187 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5188
5189 pthread_mutex_lock(&gCamLock);
5190
5191 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5192 LOGE("Dualcam: Invalid Session Id ");
5193 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005194 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005195 goto error_exit;
5196 }
5197
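            // Populate the bundle info: the main camera runs as CAM_MODE_PRIMARY
            // (bayer role) and the aux camera as CAM_MODE_SECONDARY (mono role);
            // both carry the linked session id so the backend can sync 3A
            // (CAM_3A_SYNC_FOLLOW) across the two sensors.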
5198 if (mIsMainCamera == 1) {
5199 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5200 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005201 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005202 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005203 // related session id should be session id of linked session
5204 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5205 } else {
5206 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5207 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005208 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005209 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5211 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005212 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 pthread_mutex_unlock(&gCamLock);
5214
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005215 rc = mCameraHandle->ops->set_dual_cam_cmd(
5216 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 if (rc < 0) {
5218 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005219 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005220 goto error_exit;
5221 }
5222 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 goto no_error;
5224error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005225 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005226 return rc;
5227no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 mWokenUpByDaemon = false;
5229 mPendingLiveRequest = 0;
5230 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 }
5232
5233 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005234 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005235
5236 if (mFlushPerf) {
5237 //we cannot accept any requests during flush
5238 LOGE("process_capture_request cannot proceed during flush");
5239 pthread_mutex_unlock(&mMutex);
5240 return NO_ERROR; //should return an error
5241 }
5242
5243 if (meta.exists(ANDROID_REQUEST_ID)) {
5244 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5245 mCurrentRequestId = request_id;
5246 LOGD("Received request with id: %d", request_id);
5247 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5248 LOGE("Unable to find request id field, \
5249 & no previous id available");
5250 pthread_mutex_unlock(&mMutex);
5251 return NAME_NOT_FOUND;
5252 } else {
5253 LOGD("Re-using old request id");
5254 request_id = mCurrentRequestId;
5255 }
5256
5257 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5258 request->num_output_buffers,
5259 request->input_buffer,
5260 frameNumber);
5261 // Acquire all request buffers first
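    // For each output buffer: wait on its acquire fence, note whether this is a
    // JPEG (blob) request, and record the owning channel's stream ID in
    // streamsArray so the backend knows which streams need a buffer for this
    // frame. Depth (blob + HAL_DATASPACE_DEPTH) buffers are only flagged here
    // and handled separately.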
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005262 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005264 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005265 uint32_t snapshotStreamId = 0;
5266 for (size_t i = 0; i < request->num_output_buffers; i++) {
5267 const camera3_stream_buffer_t& output = request->output_buffers[i];
5268 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5269
Emilian Peev7650c122017-01-19 08:24:33 -08005270 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5271 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005272 //FIXME: Call a function to store a local copy of the JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005273 blob_request = 1;
5274 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5275 }
5276
5277 if (output.acquire_fence != -1) {
5278 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5279 close(output.acquire_fence);
5280 if (rc != OK) {
5281 LOGE("sync wait failed %d", rc);
5282 pthread_mutex_unlock(&mMutex);
5283 return rc;
5284 }
5285 }
5286
Emilian Peev0f3c3162017-03-15 12:57:46 +00005287 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5288 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005289 depthRequestPresent = true;
5290 continue;
5291 }
5292
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005293 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005295
5296 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5297 isVidBufRequested = true;
5298 }
5299 }
5300
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005301 //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5302 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5303 itr++) {
5304 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5305 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5306 channel->getStreamID(channel->getStreamTypeMask());
5307
5308 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5309 isVidBufRequested = true;
5310 }
5311 }
5312
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005314 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005315 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005316 }
5317 if (blob_request && mRawDumpChannel) {
5318 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005319 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005320 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005321 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005322 }
5323
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005324 {
5325 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5326 // Request a RAW buffer if
5327 // 1. mHdrPlusRawSrcChannel is valid.
5328 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5329 // 3. There is no pending HDR+ request.
5330 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5331 mHdrPlusPendingRequests.size() == 0) {
5332 streamsArray.stream_request[streamsArray.num_streams].streamID =
5333 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5334 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5335 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005336 }
5337
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005338 //extract capture intent
5339 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5340 mCaptureIntent =
5341 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5342 }
5343
5344 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5345 mCacMode =
5346 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5347 }
5348
5349 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005350 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005351
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005352 {
5353 Mutex::Autolock l(gHdrPlusClientLock);
5354 // If this request has a still capture intent, try to submit an HDR+ request.
5355 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5356 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5357 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5358 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005359 }
5360
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005361 if (hdrPlusRequest) {
5362 // For a HDR+ request, just set the frame parameters.
5363 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5364 if (rc < 0) {
5365 LOGE("fail to set frame parameters");
5366 pthread_mutex_unlock(&mMutex);
5367 return rc;
5368 }
5369 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005370 /* Parse the settings:
5371 * - For every request in NORMAL MODE
5372 * - For every request in HFR mode during preview only case
5373 * - For first request of every batch in HFR mode during video
5374 * recording. In batch mode the same settings, except the frame number,
5375 * are repeated in each request of the batch.
5376 */
5377 if (!mBatchSize ||
5378 (mBatchSize && !isVidBufRequested) ||
5379 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005380 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005381 if (rc < 0) {
5382 LOGE("fail to set frame parameters");
5383 pthread_mutex_unlock(&mMutex);
5384 return rc;
5385 }
5386 }
5387 /* For batchMode HFR, setFrameParameters is not called for every
5388 * request; only the frame number of the latest request is parsed.
5389 * Keep track of the first and last frame numbers in a batch so that
5390 * metadata for all frame numbers of the batch can be duplicated in
5391 * handleBatchMetadata */
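        /* Illustrative sketch (assuming a batch size of 4): for HFR requests with
         * frame numbers 8..11 queued as one batch, mFirstFrameNumberInBatch is 8;
         * handleBatchMetadata later fans the single batch metadata out to frame
         * numbers 8, 9, 10 and 11. */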
5392 if (mBatchSize) {
5393 if (!mToBeQueuedVidBufs) {
5394 //start of the batch
5395 mFirstFrameNumberInBatch = request->frame_number;
5396 }
5397 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5398 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5399 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005400 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005401 return BAD_VALUE;
5402 }
5403 }
5404 if (mNeedSensorRestart) {
5405 /* Unlock the mutex as restartSensor waits on the channels to be
5406 * stopped, which in turn calls stream callback functions -
5407 * handleBufferWithLock and handleMetadataWithLock */
5408 pthread_mutex_unlock(&mMutex);
5409 rc = dynamicUpdateMetaStreamInfo();
5410 if (rc != NO_ERROR) {
5411 LOGE("Restarting the sensor failed");
5412 return BAD_VALUE;
5413 }
5414 mNeedSensorRestart = false;
5415 pthread_mutex_lock(&mMutex);
5416 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005417 if(mResetInstantAEC) {
5418 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5419 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5420 mResetInstantAEC = false;
5421 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005422 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 if (request->input_buffer->acquire_fence != -1) {
5424 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5425 close(request->input_buffer->acquire_fence);
5426 if (rc != OK) {
5427 LOGE("input buffer sync wait failed %d", rc);
5428 pthread_mutex_unlock(&mMutex);
5429 return rc;
5430 }
5431 }
5432 }
5433
5434 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5435 mLastCustIntentFrmNum = frameNumber;
5436 }
5437 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005438 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005439 pendingRequestIterator latestRequest;
5440 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005441 pendingRequest.num_buffers = depthRequestPresent ?
5442 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005443 pendingRequest.request_id = request_id;
5444 pendingRequest.blob_request = blob_request;
5445 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005446 if (request->input_buffer) {
5447 pendingRequest.input_buffer =
5448 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5449 *(pendingRequest.input_buffer) = *(request->input_buffer);
5450 pInputBuffer = pendingRequest.input_buffer;
5451 } else {
5452 pendingRequest.input_buffer = NULL;
5453 pInputBuffer = NULL;
5454 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005455 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005456
5457 pendingRequest.pipeline_depth = 0;
5458 pendingRequest.partial_result_cnt = 0;
5459 extractJpegMetadata(mCurJpegMeta, request);
5460 pendingRequest.jpegMetadata = mCurJpegMeta;
5461 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005462 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005463 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5464 mHybridAeEnable =
5465 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5466 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005467
5468 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5469 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005470 /* DevCamDebug metadata processCaptureRequest */
5471 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5472 mDevCamDebugMetaEnable =
5473 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5474 }
5475 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5476 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005477
5478 //extract CAC info
5479 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5480 mCacMode =
5481 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5482 }
5483 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005484 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005485
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005486 // extract enableZsl info
5487 if (gExposeEnableZslKey) {
5488 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5489 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5490 mZslEnabled = pendingRequest.enableZsl;
5491 } else {
5492 pendingRequest.enableZsl = mZslEnabled;
5493 }
5494 }
5495
Thierry Strudel3d639192016-09-09 11:52:26 -07005496 PendingBuffersInRequest bufsForCurRequest;
5497 bufsForCurRequest.frame_number = frameNumber;
5498 // Mark current timestamp for the new request
5499 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005500 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005501
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005502 if (hdrPlusRequest) {
5503 // Save settings for this request.
5504 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5505 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5506
5507 // Add to pending HDR+ request queue.
5508 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5509 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5510
5511 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5512 }
5513
Thierry Strudel3d639192016-09-09 11:52:26 -07005514 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005515 if ((request->output_buffers[i].stream->data_space ==
5516 HAL_DATASPACE_DEPTH) &&
5517 (HAL_PIXEL_FORMAT_BLOB ==
5518 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005519 continue;
5520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005521 RequestedBufferInfo requestedBuf;
5522 memset(&requestedBuf, 0, sizeof(requestedBuf));
5523 requestedBuf.stream = request->output_buffers[i].stream;
5524 requestedBuf.buffer = NULL;
5525 pendingRequest.buffers.push_back(requestedBuf);
5526
5527 // Add the buffer handle to the pending buffers list
5528 PendingBufferInfo bufferInfo;
5529 bufferInfo.buffer = request->output_buffers[i].buffer;
5530 bufferInfo.stream = request->output_buffers[i].stream;
5531 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5532 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5533 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5534 frameNumber, bufferInfo.buffer,
5535 channel->getStreamTypeMask(), bufferInfo.stream->format);
5536 }
5537 // Add this request packet into mPendingBuffersMap
5538 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5539 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5540 mPendingBuffersMap.get_num_overall_buffers());
5541
5542 latestRequest = mPendingRequestsList.insert(
5543 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005544
5545 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5546 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005547 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005548 for (size_t i = 0; i < request->num_output_buffers; i++) {
5549 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5550 }
5551
Thierry Strudel3d639192016-09-09 11:52:26 -07005552 if(mFlush) {
5553 LOGI("mFlush is true");
5554 pthread_mutex_unlock(&mMutex);
5555 return NO_ERROR;
5556 }
5557
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005558 // If this is not an HDR+ request, send the request to the metadata channel and to
5559 // each output buffer's channel.
5560 if (!hdrPlusRequest) {
5561 int indexUsed;
5562 // Notify metadata channel we receive a request
5563 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005564
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005565 if(request->input_buffer != NULL){
5566 LOGD("Input request, frame_number %d", frameNumber);
5567 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5568 if (NO_ERROR != rc) {
5569 LOGE("fail to set reproc parameters");
5570 pthread_mutex_unlock(&mMutex);
5571 return rc;
5572 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005573 }
5574
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005575 // Call request on other streams
5576 uint32_t streams_need_metadata = 0;
5577 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5578 for (size_t i = 0; i < request->num_output_buffers; i++) {
5579 const camera3_stream_buffer_t& output = request->output_buffers[i];
5580 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5581
5582 if (channel == NULL) {
5583 LOGW("invalid channel pointer for stream");
5584 continue;
5585 }
5586
5587 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5588 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5589 output.buffer, request->input_buffer, frameNumber);
5590 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005591 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005592 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5593 if (rc < 0) {
5594 LOGE("Fail to request on picture channel");
5595 pthread_mutex_unlock(&mMutex);
5596 return rc;
5597 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005598 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005599 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5600 assert(NULL != mDepthChannel);
5601 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005602
Emilian Peev7650c122017-01-19 08:24:33 -08005603 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5604 if (rc < 0) {
5605 LOGE("Fail to map on depth buffer");
5606 pthread_mutex_unlock(&mMutex);
5607 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005608 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005609 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005610 } else {
5611 LOGD("snapshot request with buffer %p, frame_number %d",
5612 output.buffer, frameNumber);
5613 if (!request->settings) {
5614 rc = channel->request(output.buffer, frameNumber,
5615 NULL, mPrevParameters, indexUsed);
5616 } else {
5617 rc = channel->request(output.buffer, frameNumber,
5618 NULL, mParameters, indexUsed);
5619 }
5620 if (rc < 0) {
5621 LOGE("Fail to request on picture channel");
5622 pthread_mutex_unlock(&mMutex);
5623 return rc;
5624 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005625
Emilian Peev7650c122017-01-19 08:24:33 -08005626 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5627 uint32_t j = 0;
5628 for (j = 0; j < streamsArray.num_streams; j++) {
5629 if (streamsArray.stream_request[j].streamID == streamId) {
5630 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5631 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5632 else
5633 streamsArray.stream_request[j].buf_index = indexUsed;
5634 break;
5635 }
5636 }
5637 if (j == streamsArray.num_streams) {
5638 LOGE("Did not find matching stream to update index");
5639 assert(0);
5640 }
5641
5642 pendingBufferIter->need_metadata = true;
5643 streams_need_metadata++;
5644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005645 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005646 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5647 bool needMetadata = false;
5648 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5649 rc = yuvChannel->request(output.buffer, frameNumber,
5650 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5651 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005652 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005653 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005654 pthread_mutex_unlock(&mMutex);
5655 return rc;
5656 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005657
5658 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5659 uint32_t j = 0;
5660 for (j = 0; j < streamsArray.num_streams; j++) {
5661 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005662 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5663 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5664 else
5665 streamsArray.stream_request[j].buf_index = indexUsed;
5666 break;
5667 }
5668 }
5669 if (j == streamsArray.num_streams) {
5670 LOGE("Did not find matching stream to update index");
5671 assert(0);
5672 }
5673
5674 pendingBufferIter->need_metadata = needMetadata;
5675 if (needMetadata)
5676 streams_need_metadata += 1;
5677 LOGD("calling YUV channel request, need_metadata is %d",
5678 needMetadata);
5679 } else {
5680 LOGD("request with buffer %p, frame_number %d",
5681 output.buffer, frameNumber);
5682
5683 rc = channel->request(output.buffer, frameNumber, indexUsed);
5684
5685 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5686 uint32_t j = 0;
5687 for (j = 0; j < streamsArray.num_streams; j++) {
5688 if (streamsArray.stream_request[j].streamID == streamId) {
5689 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5690 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5691 else
5692 streamsArray.stream_request[j].buf_index = indexUsed;
5693 break;
5694 }
5695 }
5696 if (j == streamsArray.num_streams) {
5697 LOGE("Did not find matching stream to update index");
5698 assert(0);
5699 }
5700
5701 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5702 && mBatchSize) {
5703 mToBeQueuedVidBufs++;
5704 if (mToBeQueuedVidBufs == mBatchSize) {
5705 channel->queueBatchBuf();
5706 }
5707 }
5708 if (rc < 0) {
5709 LOGE("request failed");
5710 pthread_mutex_unlock(&mMutex);
5711 return rc;
5712 }
5713 }
5714 pendingBufferIter++;
5715 }
5716
5717 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5718 itr++) {
5719 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5720
5721 if (channel == NULL) {
5722 LOGE("invalid channel pointer for stream");
5723 assert(0);
5724 return BAD_VALUE;
5725 }
5726
5727 InternalRequest requestedStream;
5728 requestedStream = (*itr);
5729
5730
5731 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5732 LOGD("snapshot request internally input buffer %p, frame_number %d",
5733 request->input_buffer, frameNumber);
5734 if(request->input_buffer != NULL){
5735 rc = channel->request(NULL, frameNumber,
5736 pInputBuffer, &mReprocMeta, indexUsed, true,
5737 requestedStream.meteringOnly);
5738 if (rc < 0) {
5739 LOGE("Fail to request on picture channel");
5740 pthread_mutex_unlock(&mMutex);
5741 return rc;
5742 }
5743 } else {
5744 LOGD("snapshot request with frame_number %d", frameNumber);
5745 if (!request->settings) {
5746 rc = channel->request(NULL, frameNumber,
5747 NULL, mPrevParameters, indexUsed, true,
5748 requestedStream.meteringOnly);
5749 } else {
5750 rc = channel->request(NULL, frameNumber,
5751 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5752 }
5753 if (rc < 0) {
5754 LOGE("Fail to request on picture channel");
5755 pthread_mutex_unlock(&mMutex);
5756 return rc;
5757 }
5758
5759 if ((*itr).meteringOnly != 1) {
5760 requestedStream.need_metadata = 1;
5761 streams_need_metadata++;
5762 }
5763 }
5764
5765 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5766 uint32_t j = 0;
5767 for (j = 0; j < streamsArray.num_streams; j++) {
5768 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005769 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5770 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5771 else
5772 streamsArray.stream_request[j].buf_index = indexUsed;
5773 break;
5774 }
5775 }
5776 if (j == streamsArray.num_streams) {
5777 LOGE("Did not find matching stream to update index");
5778 assert(0);
5779 }
5780
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005781 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005782 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005783 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005784 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005785 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005786 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005787 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005788
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005789 //If 2 streams have need_metadata set to true, fail the request, unless
5790 //we copy/reference count the metadata buffer
5791 if (streams_need_metadata > 1) {
5792 LOGE("not supporting request in which two streams requires"
5793 " 2 HAL metadata for reprocessing");
5794 pthread_mutex_unlock(&mMutex);
5795 return -EINVAL;
5796 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005797
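    // Decide whether the sensor should emit PDAF data for this frame: default to
    // CAM_PD_DATA_SKIP when a depth channel is configured and CAM_PD_DATA_DISABLED
    // otherwise; if a depth buffer was requested, honor an explicit
    // NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE setting and cache the choice in
    // mDepthCloudMode for requests that arrive without settings.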
Emilian Peev656e4fa2017-06-02 16:47:04 +01005798 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5799 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5800 if (depthRequestPresent && mDepthChannel) {
5801 if (request->settings) {
5802 camera_metadata_ro_entry entry;
5803 if (find_camera_metadata_ro_entry(request->settings,
5804 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5805 if (entry.data.u8[0]) {
5806 pdafEnable = CAM_PD_DATA_ENABLED;
5807 } else {
5808 pdafEnable = CAM_PD_DATA_SKIP;
5809 }
5810 mDepthCloudMode = pdafEnable;
5811 } else {
5812 pdafEnable = mDepthCloudMode;
5813 }
5814 } else {
5815 pdafEnable = mDepthCloudMode;
5816 }
5817 }
5818
Emilian Peev7650c122017-01-19 08:24:33 -08005819 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5820 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5821 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5822 pthread_mutex_unlock(&mMutex);
5823 return BAD_VALUE;
5824 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005825
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005826 if (request->input_buffer == NULL) {
5827 /* Set the parameters to backend:
5828 * - For every request in NORMAL MODE
5829 * - For every request in HFR mode during preview only case
5830 * - Once every batch in HFR mode during video recording
5831 */
5832 if (!mBatchSize ||
5833 (mBatchSize && !isVidBufRequested) ||
5834 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5835 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5836 mBatchSize, isVidBufRequested,
5837 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005838
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005839 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
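                // Merge this request's stream IDs into mBatchedStreamsArray,
                // keeping only unique entries, so that the single set_parms call
                // below covers every stream touched during the batch.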
5840 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5841 uint32_t m = 0;
5842 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5843 if (streamsArray.stream_request[k].streamID ==
5844 mBatchedStreamsArray.stream_request[m].streamID)
5845 break;
5846 }
5847 if (m == mBatchedStreamsArray.num_streams) {
5848 mBatchedStreamsArray.stream_request\
5849 [mBatchedStreamsArray.num_streams].streamID =
5850 streamsArray.stream_request[k].streamID;
5851 mBatchedStreamsArray.stream_request\
5852 [mBatchedStreamsArray.num_streams].buf_index =
5853 streamsArray.stream_request[k].buf_index;
5854 mBatchedStreamsArray.num_streams =
5855 mBatchedStreamsArray.num_streams + 1;
5856 }
5857 }
5858 streamsArray = mBatchedStreamsArray;
5859 }
5860 /* Update stream id of all the requested buffers */
5861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5862 streamsArray)) {
5863 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005864 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005865 return BAD_VALUE;
5866 }
5867
5868 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5869 mParameters);
5870 if (rc < 0) {
5871 LOGE("set_parms failed");
5872 }
5873 /* reset to zero because the batch is queued */
5874 mToBeQueuedVidBufs = 0;
5875 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5876 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5877 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
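            // Batch not yet full: only accumulate this request's unique stream
            // IDs; set_parms is deferred until the batch completes.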
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005878 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5879 uint32_t m = 0;
5880 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5881 if (streamsArray.stream_request[k].streamID ==
5882 mBatchedStreamsArray.stream_request[m].streamID)
5883 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005884 }
5885 if (m == mBatchedStreamsArray.num_streams) {
5886 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5887 streamID = streamsArray.stream_request[k].streamID;
5888 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5889 buf_index = streamsArray.stream_request[k].buf_index;
5890 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5891 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005892 }
5893 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005894 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005895
5896 // Start all streams after the first setting is sent, so that the
5897 // setting can be applied sooner: (0 + apply_delay)th frame.
5898 if (mState == CONFIGURED && mChannelHandle) {
5899 //Then start them.
5900 LOGH("Start META Channel");
5901 rc = mMetadataChannel->start();
5902 if (rc < 0) {
5903 LOGE("META channel start failed");
5904 pthread_mutex_unlock(&mMutex);
5905 return rc;
5906 }
5907
5908 if (mAnalysisChannel) {
5909 rc = mAnalysisChannel->start();
5910 if (rc < 0) {
5911 LOGE("Analysis channel start failed");
5912 mMetadataChannel->stop();
5913 pthread_mutex_unlock(&mMutex);
5914 return rc;
5915 }
5916 }
5917
5918 if (mSupportChannel) {
5919 rc = mSupportChannel->start();
5920 if (rc < 0) {
5921 LOGE("Support channel start failed");
5922 mMetadataChannel->stop();
5923 /* Although support and analysis are mutually exclusive today,
5924 stop the analysis channel in any case for future proofing */
5925 if (mAnalysisChannel) {
5926 mAnalysisChannel->stop();
5927 }
5928 pthread_mutex_unlock(&mMutex);
5929 return rc;
5930 }
5931 }
5932 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5933 it != mStreamInfo.end(); it++) {
5934 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5935 LOGH("Start Processing Channel mask=%d",
5936 channel->getStreamTypeMask());
5937 rc = channel->start();
5938 if (rc < 0) {
5939 LOGE("channel start failed");
5940 pthread_mutex_unlock(&mMutex);
5941 return rc;
5942 }
5943 }
5944
5945 if (mRawDumpChannel) {
5946 LOGD("Starting raw dump stream");
5947 rc = mRawDumpChannel->start();
5948 if (rc != NO_ERROR) {
5949 LOGE("Error Starting Raw Dump Channel");
5950 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5951 it != mStreamInfo.end(); it++) {
5952 QCamera3Channel *channel =
5953 (QCamera3Channel *)(*it)->stream->priv;
5954 LOGH("Stopping Processing Channel mask=%d",
5955 channel->getStreamTypeMask());
5956 channel->stop();
5957 }
5958 if (mSupportChannel)
5959 mSupportChannel->stop();
5960 if (mAnalysisChannel) {
5961 mAnalysisChannel->stop();
5962 }
5963 mMetadataChannel->stop();
5964 pthread_mutex_unlock(&mMutex);
5965 return rc;
5966 }
5967 }
5968
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005969 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005970 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005971 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005972 if (rc != NO_ERROR) {
5973 LOGE("start_channel failed %d", rc);
5974 pthread_mutex_unlock(&mMutex);
5975 return rc;
5976 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005977
5978 {
5979 // Configure Easel for stream on.
5980 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005981
5982 // Now that sensor mode should have been selected, get the selected sensor mode
5983 // info.
5984 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5985 getCurrentSensorModeInfo(mSensorModeInfo);
5986
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005987 if (EaselManagerClientOpened) {
5988 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07005989 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5990 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005991 if (rc != OK) {
5992 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5993 mCameraId, mSensorModeInfo.op_pixel_clk);
5994 pthread_mutex_unlock(&mMutex);
5995 return rc;
5996 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005997 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005998 }
5999 }
6000
6001 // Start sensor streaming.
6002 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6003 mChannelHandle);
6004 if (rc != NO_ERROR) {
6005 LOGE("start_sensor_stream_on failed %d", rc);
6006 pthread_mutex_unlock(&mMutex);
6007 return rc;
6008 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006009 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006010 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006011 }
6012
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006013 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07006014 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006015 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006016 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006017 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6018 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6019 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6020 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6021 rc = enableHdrPlusModeLocked();
6022 if (rc != OK) {
6023 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6024 pthread_mutex_unlock(&mMutex);
6025 return rc;
6026 }
6027
6028 mFirstPreviewIntentSeen = true;
6029 }
6030 }
6031
Thierry Strudel3d639192016-09-09 11:52:26 -07006032 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6033
6034 mState = STARTED;
6035 // Added a timed condition wait
6036 struct timespec ts;
6037 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006038 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006039 if (rc < 0) {
6040 isValidTimeout = 0;
6041 LOGE("Error reading the real time clock!!");
6042 }
6043 else {
6044 // Make the timeout 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006045 int64_t timeout = 5;
6046 {
6047 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6048 // If there is a pending HDR+ request, the following requests may be blocked until the
6049 // HDR+ request is done. So allow a longer timeout.
6050 if (mHdrPlusPendingRequests.size() > 0) {
6051 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6052 }
6053 }
6054 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006055 }
6056 //Block on conditional variable
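    // Throttle the caller: wait until the number of in-flight requests drops
    // below mMinInFlightRequests, unless this is a reprocess (input buffer)
    // request or the device has hit ERROR/DEINIT.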
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006057 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006058 (mState != ERROR) && (mState != DEINIT)) {
6059 if (!isValidTimeout) {
6060 LOGD("Blocking on conditional wait");
6061 pthread_cond_wait(&mRequestCond, &mMutex);
6062 }
6063 else {
6064 LOGD("Blocking on timed conditional wait");
6065 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6066 if (rc == ETIMEDOUT) {
6067 rc = -ENODEV;
6068 LOGE("Unblocked on timeout!!!!");
6069 break;
6070 }
6071 }
6072 LOGD("Unblocked");
6073 if (mWokenUpByDaemon) {
6074 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006075 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006076 break;
6077 }
6078 }
6079 pthread_mutex_unlock(&mMutex);
6080
6081 return rc;
6082}
6083
6084/*===========================================================================
6085 * FUNCTION : dump
6086 *
6087 * DESCRIPTION: Dump the pending request list, pending buffer map and
6088 * pending frame drop list of the HAL to the given file descriptor
6089 * PARAMETERS :
6090 * @fd : file descriptor to dump into
6091 *
6092 * RETURN : NONE
6093 *==========================================================================*/
6094void QCamera3HardwareInterface::dump(int fd)
6095{
6096 pthread_mutex_lock(&mMutex);
6097 dprintf(fd, "\n Camera HAL3 information Begin \n");
6098
6099 dprintf(fd, "\nNumber of pending requests: %zu \n",
6100 mPendingRequestsList.size());
6101 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6102 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6103 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6104 for(pendingRequestIterator i = mPendingRequestsList.begin();
6105 i != mPendingRequestsList.end(); i++) {
6106 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6107 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6108 i->input_buffer);
6109 }
6110 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6111 mPendingBuffersMap.get_num_overall_buffers());
6112 dprintf(fd, "-------+------------------\n");
6113 dprintf(fd, " Frame | Stream type mask \n");
6114 dprintf(fd, "-------+------------------\n");
6115 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6116 for(auto &j : req.mPendingBufferList) {
6117 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6118 dprintf(fd, " %5d | %11d \n",
6119 req.frame_number, channel->getStreamTypeMask());
6120 }
6121 }
6122 dprintf(fd, "-------+------------------\n");
6123
6124 dprintf(fd, "\nPending frame drop list: %zu\n",
6125 mPendingFrameDropList.size());
6126 dprintf(fd, "-------+-----------\n");
6127 dprintf(fd, " Frame | Stream ID \n");
6128 dprintf(fd, "-------+-----------\n");
6129 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6130 i != mPendingFrameDropList.end(); i++) {
6131 dprintf(fd, " %5d | %9d \n",
6132 i->frame_number, i->stream_ID);
6133 }
6134 dprintf(fd, "-------+-----------\n");
6135
6136 dprintf(fd, "\n Camera HAL3 information End \n");
6137
6138 /* use dumpsys media.camera as trigger to send update debug level event */
6139 mUpdateDebugLevel = true;
6140 pthread_mutex_unlock(&mMutex);
6141 return;
6142}
6143
6144/*===========================================================================
6145 * FUNCTION : flush
6146 *
6147 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6148 * conditionally restarts channels
6149 *
6150 * PARAMETERS :
6151 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006152 * @ stopChannelImmediately: stop the channel immediately. This should be used
6153 * when the device has encountered an error and MIPI may
6154 * have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 *
6156 * RETURN :
6157 * 0 on success
6158 * Error code on failure
6159 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006160int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006161{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006162 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006163 int32_t rc = NO_ERROR;
6164
6165 LOGD("Unblocking Process Capture Request");
6166 pthread_mutex_lock(&mMutex);
6167 mFlush = true;
6168 pthread_mutex_unlock(&mMutex);
6169
6170 rc = stopAllChannels();
6171 // unlink of dualcam
6172 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006173 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6174 &m_pDualCamCmdPtr->bundle_info;
6175 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006176 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6177 pthread_mutex_lock(&gCamLock);
6178
6179 if (mIsMainCamera == 1) {
6180 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6181 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006182 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006183 // related session id should be session id of linked session
6184 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6185 } else {
6186 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6187 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006188 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006189 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6190 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006191 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006192 pthread_mutex_unlock(&gCamLock);
6193
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006194 rc = mCameraHandle->ops->set_dual_cam_cmd(
6195 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006196 if (rc < 0) {
6197 LOGE("Dualcam: Unlink failed, but still proceed to close");
6198 }
6199 }
6200
6201 if (rc < 0) {
6202 LOGE("stopAllChannels failed");
6203 return rc;
6204 }
6205 if (mChannelHandle) {
6206 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006207 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006208 }
6209
6210 // Reset bundle info
6211 rc = setBundleInfo();
6212 if (rc < 0) {
6213 LOGE("setBundleInfo failed %d", rc);
6214 return rc;
6215 }
6216
6217 // Mutex Lock
6218 pthread_mutex_lock(&mMutex);
6219
6220 // Unblock process_capture_request
6221 mPendingLiveRequest = 0;
6222 pthread_cond_signal(&mRequestCond);
6223
6224 rc = notifyErrorForPendingRequests();
6225 if (rc < 0) {
6226 LOGE("notifyErrorForPendingRequests failed");
6227 pthread_mutex_unlock(&mMutex);
6228 return rc;
6229 }
6230
6231 mFlush = false;
6232
6233 // Start the Streams/Channels
6234 if (restartChannels) {
6235 rc = startAllChannels();
6236 if (rc < 0) {
6237 LOGE("startAllChannels failed");
6238 pthread_mutex_unlock(&mMutex);
6239 return rc;
6240 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006241 if (mChannelHandle) {
6242 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006243 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006244 if (rc < 0) {
6245 LOGE("start_channel failed");
6246 pthread_mutex_unlock(&mMutex);
6247 return rc;
6248 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006249 }
6250 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006251 pthread_mutex_unlock(&mMutex);
6252
6253 return 0;
6254}
6255
6256/*===========================================================================
6257 * FUNCTION : flushPerf
6258 *
6259 * DESCRIPTION: This is the performance optimization version of flush that does
6260 * not use stream off, rather flushes the system
6261 *
6262 * PARAMETERS :
6263 *
6264 *
6265 * RETURN : 0 : success
6266 * -EINVAL: input is malformed (device is not valid)
6267 * -ENODEV: if the device has encountered a serious error
6268 *==========================================================================*/
6269int QCamera3HardwareInterface::flushPerf()
6270{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006271 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006272 int32_t rc = 0;
6273 struct timespec timeout;
6274 bool timed_wait = false;
6275
6276 pthread_mutex_lock(&mMutex);
6277 mFlushPerf = true;
6278 mPendingBuffersMap.numPendingBufsAtFlush =
6279 mPendingBuffersMap.get_num_overall_buffers();
6280 LOGD("Calling flush. Wait for %d buffers to return",
6281 mPendingBuffersMap.numPendingBufsAtFlush);
6282
6283 /* send the flush event to the backend */
6284 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6285 if (rc < 0) {
6286 LOGE("Error in flush: IOCTL failure");
6287 mFlushPerf = false;
6288 pthread_mutex_unlock(&mMutex);
6289 return -ENODEV;
6290 }
6291
6292 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6293 LOGD("No pending buffers in HAL, return flush");
6294 mFlushPerf = false;
6295 pthread_mutex_unlock(&mMutex);
6296 return rc;
6297 }
6298
6299 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006300 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006301 if (rc < 0) {
6302 LOGE("Error reading the real time clock, cannot use timed wait");
6303 } else {
6304 timeout.tv_sec += FLUSH_TIMEOUT;
6305 timed_wait = true;
6306 }
6307
6308 //Block on conditional variable
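    // Wait for the backend to return every buffer that was pending at the time
    // of the flush; use the timed wait (FLUSH_TIMEOUT) when the clock read
    // above succeeded.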
6309 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6310 LOGD("Waiting on mBuffersCond");
6311 if (!timed_wait) {
6312 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6313 if (rc != 0) {
6314 LOGE("pthread_cond_wait failed due to rc = %s",
6315 strerror(rc));
6316 break;
6317 }
6318 } else {
6319 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6320 if (rc != 0) {
6321 LOGE("pthread_cond_timedwait failed due to rc = %s",
6322 strerror(rc));
6323 break;
6324 }
6325 }
6326 }
6327 if (rc != 0) {
6328 mFlushPerf = false;
6329 pthread_mutex_unlock(&mMutex);
6330 return -ENODEV;
6331 }
6332
6333 LOGD("Received buffers, now safe to return them");
6334
6335 //make sure the channels handle flush
6336 //currently only required for the picture channel to release snapshot resources
6337 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6338 it != mStreamInfo.end(); it++) {
6339 QCamera3Channel *channel = (*it)->channel;
6340 if (channel) {
6341 rc = channel->flush();
6342 if (rc) {
6343 LOGE("Flushing the channels failed with error %d", rc);
6344 // even though the channel flush failed, we need to continue and
6345 // return the buffers we have to the framework; however, the return
6346 // value will be an error
6347 rc = -ENODEV;
6348 }
6349 }
6350 }
6351
6352 /* notify the frameworks and send errored results */
6353 rc = notifyErrorForPendingRequests();
6354 if (rc < 0) {
6355 LOGE("notifyErrorForPendingRequests failed");
6356 pthread_mutex_unlock(&mMutex);
6357 return rc;
6358 }
6359
6360 //unblock process_capture_request
6361 mPendingLiveRequest = 0;
6362 unblockRequestIfNecessary();
6363
6364 mFlushPerf = false;
6365 pthread_mutex_unlock(&mMutex);
6366 LOGD ("Flush Operation complete. rc = %d", rc);
6367 return rc;
6368}
6369
6370/*===========================================================================
6371 * FUNCTION : handleCameraDeviceError
6372 *
6373 * DESCRIPTION: This function calls internal flush and notifies the error to
6374 * framework and updates the state variable.
6375 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006376 * PARAMETERS :
6377 * @stopChannelImmediately : stop channels immediately without waiting for
6378 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006379 *
6380 * RETURN : NO_ERROR on Success
6381 * Error code on failure
6382 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006383int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006384{
6385 int32_t rc = NO_ERROR;
6386
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006387 {
6388 Mutex::Autolock lock(mFlushLock);
6389 pthread_mutex_lock(&mMutex);
6390 if (mState != ERROR) {
6391 //if mState != ERROR, nothing to be done
6392 pthread_mutex_unlock(&mMutex);
6393 return NO_ERROR;
6394 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006396
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006397 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006398 if (NO_ERROR != rc) {
6399 LOGE("internal flush to handle mState = ERROR failed");
6400 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006401
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006402 pthread_mutex_lock(&mMutex);
6403 mState = DEINIT;
6404 pthread_mutex_unlock(&mMutex);
6405 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006406
6407 camera3_notify_msg_t notify_msg;
6408 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6409 notify_msg.type = CAMERA3_MSG_ERROR;
6410 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6411 notify_msg.message.error.error_stream = NULL;
6412 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006413 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006414
6415 return rc;
6416}
6417
6418/*===========================================================================
6419 * FUNCTION : captureResultCb
6420 *
6421 * DESCRIPTION: Callback handler for all capture results
6422 * (streams, as well as metadata)
6423 *
6424 * PARAMETERS :
6425 * @metadata : metadata information
6426 * @buffer : actual gralloc buffer to be returned to frameworks.
6427 * NULL if metadata.
6428 *
6429 * RETURN : NONE
6430 *==========================================================================*/
6431void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6432 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6433{
6434 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006435 pthread_mutex_lock(&mMutex);
6436 uint8_t batchSize = mBatchSize;
6437 pthread_mutex_unlock(&mMutex);
6438 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006439 handleBatchMetadata(metadata_buf,
6440 true /* free_and_bufdone_meta_buf */);
6441 } else { /* mBatchSize = 0 */
6442 hdrPlusPerfLock(metadata_buf);
6443 pthread_mutex_lock(&mMutex);
6444 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006445 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006446 true /* last urgent frame of batch metadata */,
6447 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006448 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006449 pthread_mutex_unlock(&mMutex);
6450 }
6451 } else if (isInputBuffer) {
6452 pthread_mutex_lock(&mMutex);
6453 handleInputBufferWithLock(frame_number);
6454 pthread_mutex_unlock(&mMutex);
6455 } else {
6456 pthread_mutex_lock(&mMutex);
6457 handleBufferWithLock(buffer, frame_number);
6458 pthread_mutex_unlock(&mMutex);
6459 }
6460 return;
6461}
6462
6463/*===========================================================================
6464 * FUNCTION : getReprocessibleOutputStreamId
6465 *
6466 * DESCRIPTION: Get source output stream id for the input reprocess stream
6467 * based on size and format, which would be the largest
6468 * output stream if an input stream exists.
6469 *
6470 * PARAMETERS :
6471 * @id : return the stream id if found
6472 *
6473 * RETURN : int32_t type of status
6474 * NO_ERROR -- success
6475 * non-zero failure code
6476 *==========================================================================*/
6477int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6478{
6479 /* check if there is any output or bidirectional stream with the same size
6480 and format as the input stream, and return that stream's id */
6481 if ((mInputStreamInfo.dim.width > 0) &&
6482 (mInputStreamInfo.dim.height > 0)) {
6483 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6484 it != mStreamInfo.end(); it++) {
6485
6486 camera3_stream_t *stream = (*it)->stream;
6487 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6488 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6489 (stream->format == mInputStreamInfo.format)) {
6490 // Usage flag for an input stream and the source output stream
6491 // may be different.
6492 LOGD("Found reprocessible output stream! %p", *it);
6493 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6494 stream->usage, mInputStreamInfo.usage);
6495
6496 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6497 if (channel != NULL && channel->mStreams[0]) {
6498 id = channel->mStreams[0]->getMyServerID();
6499 return NO_ERROR;
6500 }
6501 }
6502 }
6503 } else {
6504 LOGD("No input stream, so no reprocessible output stream");
6505 }
6506 return NAME_NOT_FOUND;
6507}
6508
6509/*===========================================================================
6510 * FUNCTION : lookupFwkName
6511 *
6512 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6513 * make sure the parameter is correctly propagated
6514 *
6515 * PARAMETERS :
6516 * @arr : map between the two enums
6517 * @len : len of the map
6518 * @hal_name : name of the hal_parm to map
6519 *
6520 * RETURN : int type of status
6521 * fwk_name -- success
 6522 *              non-zero failure code
6523 *==========================================================================*/
6524template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6525 size_t len, halType hal_name)
6526{
6527
6528 for (size_t i = 0; i < len; i++) {
6529 if (arr[i].hal_name == hal_name) {
6530 return arr[i].fwk_name;
6531 }
6532 }
6533
 6534    /* Not being able to find a matching framework type is not necessarily
 6535     * an error. This happens when mm-camera supports more attributes
 6536     * than the framework does */
6537 LOGH("Cannot find matching framework type");
6538 return NAME_NOT_FOUND;
6539}
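/* Illustrative usage sketch, not part of the original code: translating a HAL
 * antibanding enum into its framework counterpart. The map and enum names are
 * the ones used later in this file.
 *
 *   int val = lookupFwkName(ANTIBANDING_MODES_MAP,
 *           METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
 *           (uint32_t)CAM_ANTIBANDING_MODE_AUTO);
 *   if (NAME_NOT_FOUND != val) {
 *       uint8_t fwk_ab_mode = (uint8_t)val;
 *       // fwk_ab_mode can be written to ANDROID_CONTROL_AE_ANTIBANDING_MODE
 *   }
 */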
6540
6541/*===========================================================================
6542 * FUNCTION : lookupHalName
6543 *
 6544 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6545 *              make sure the parameter is correctly propagated
6546 *
6547 * PARAMETERS :
6548 * @arr : map between the two enums
6549 * @len : len of the map
 6550 *   @fwk_name : name of the framework parameter to map
6551 *
6552 * RETURN : int32_t type of status
6553 * hal_name -- success
6554 * none-zero failure code
6555 *==========================================================================*/
6556template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6557 size_t len, fwkType fwk_name)
6558{
6559 for (size_t i = 0; i < len; i++) {
6560 if (arr[i].fwk_name == fwk_name) {
6561 return arr[i].hal_name;
6562 }
6563 }
6564
6565 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6566 return NAME_NOT_FOUND;
6567}
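/* Illustrative usage sketch, not part of the original code: the reverse
 * mapping, converting a framework effect mode into the HAL enum before it is
 * programmed through CAM_INTF_PARM_EFFECT. Names are the ones used later in
 * this file.
 *
 *   int halEffect = lookupHalName(EFFECT_MODES_MAP,
 *           METADATA_MAP_SIZE(EFFECT_MODES_MAP),
 *           (int32_t)ANDROID_CONTROL_EFFECT_MODE_SEPIA);
 *   if (NAME_NOT_FOUND != halEffect) {
 *       // halEffect holds the corresponding backend effect enum value
 *   }
 */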
6568
6569/*===========================================================================
6570 * FUNCTION : lookupProp
6571 *
6572 * DESCRIPTION: lookup a value by its name
6573 *
6574 * PARAMETERS :
6575 * @arr : map between the two enums
6576 * @len : size of the map
6577 * @name : name to be looked up
6578 *
6579 * RETURN : Value if found
6580 * CAM_CDS_MODE_MAX if not found
6581 *==========================================================================*/
6582template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6583 size_t len, const char *name)
6584{
6585 if (name) {
6586 for (size_t i = 0; i < len; i++) {
6587 if (!strcmp(arr[i].desc, name)) {
6588 return arr[i].val;
6589 }
6590 }
6591 }
6592 return CAM_CDS_MODE_MAX;
6593}
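/* Illustrative usage sketch, not part of the original code: resolving a CDS
 * mode from a system property string. "CDS_MAP" is a placeholder name for a
 * {desc, val} table and "persist.camera.CDS" is an example property key; both
 * are assumptions for this sketch.
 *
 *   char prop[PROPERTY_VALUE_MAX];
 *   memset(prop, 0, sizeof(prop));
 *   property_get("persist.camera.CDS", prop, "auto");
 *   cam_cds_mode_type_t cds_mode =
 *           lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
 *   if (CAM_CDS_MODE_MAX == cds_mode) {
 *       // the property string did not match any known CDS mode
 *   }
 */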
6594
6595/*===========================================================================
 6596 * FUNCTION   : translateFromHalMetadata
 6597 * DESCRIPTION: Translate metadata from HAL format into the framework camera_metadata_t format
6598 *
6599 * PARAMETERS :
6600 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006601 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006602 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006603 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6604 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006605 *
6606 * RETURN : camera_metadata_t*
6607 * metadata in a format specified by fwk
6608 *==========================================================================*/
6609camera_metadata_t*
6610QCamera3HardwareInterface::translateFromHalMetadata(
6611 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006612 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006613 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006614 bool lastMetadataInBatch,
6615 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006616{
6617 CameraMetadata camMetadata;
6618 camera_metadata_t *resultMetadata;
6619
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006620 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006621 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6622 * Timestamp is needed because it's used for shutter notify calculation.
 6623     */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006624 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006625 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006626 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006627 }
6628
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006629 if (pendingRequest.jpegMetadata.entryCount())
6630 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006631
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006632 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6633 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6634 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6635 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6636 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006637 if (mBatchSize == 0) {
6638 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006639 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006640 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006641
Samuel Ha68ba5172016-12-15 18:41:12 -08006642 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6643    // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006644 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006645 // DevCamDebug metadata translateFromHalMetadata AF
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6647 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6648 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6649 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6650 }
6651 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6652 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6653 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6654 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6655 }
6656 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6657 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6658 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6659 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6660 }
6661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6662 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6663 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6664 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6665 }
6666 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6667 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6668 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6669 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6670 }
6671 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6672 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6673 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6674 *DevCamDebug_af_monitor_pdaf_target_pos;
6675 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6676 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6677 }
6678 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6679 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6680 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6681 *DevCamDebug_af_monitor_pdaf_confidence;
6682 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6683 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6684 }
6685 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6686 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6687 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6688 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6689 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6690 }
6691 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6692 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6693 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6694 *DevCamDebug_af_monitor_tof_target_pos;
6695 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6696 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6697 }
6698 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6699 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6700 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6701 *DevCamDebug_af_monitor_tof_confidence;
6702 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6703 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6704 }
6705 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6706 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6707 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6708 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6709 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6712 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6713 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6714 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6715 &fwk_DevCamDebug_af_monitor_type_select, 1);
6716 }
6717 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6718 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6719 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6720 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6721 &fwk_DevCamDebug_af_monitor_refocus, 1);
6722 }
6723 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6724 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6725 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6726 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6727 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6730 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6731 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6732 *DevCamDebug_af_search_pdaf_target_pos;
6733 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6734 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6735 }
6736 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6737 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6738 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6739 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6740 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6741 }
6742 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6743 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6744 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6745 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6746 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6749 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6750 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6751 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6752 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6753 }
6754 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6755 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6756 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6757 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6758 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6759 }
6760 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6761 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6762 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6763 *DevCamDebug_af_search_tof_target_pos;
6764 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6765 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6766 }
6767 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6768 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6769 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6770 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6771 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6772 }
6773 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6774 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6775 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6776 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6777 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6778 }
6779 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6780 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6781 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6782 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6783 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6784 }
6785 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6786 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6787 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6788 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6789 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6790 }
6791 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6792 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6793 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6794 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6795 &fwk_DevCamDebug_af_search_type_select, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6798 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6799 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6800 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6801 &fwk_DevCamDebug_af_search_next_pos, 1);
6802 }
6803 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6804 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6805 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6806 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6807 &fwk_DevCamDebug_af_search_target_pos, 1);
6808 }
6809 // DevCamDebug metadata translateFromHalMetadata AEC
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6811 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6812 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6813 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6816 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6817 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6818 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6819 }
6820 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6821 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6822 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6823 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6824 }
6825 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6826 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6827 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6828 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6831 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6832 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6833 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6834 }
6835 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6836 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6837 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6838 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6839 }
6840 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6841 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6842 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6843 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6844 }
6845 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6846 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6847 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6848 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6849 }
Samuel Ha34229982017-02-17 13:51:11 -08006850 // DevCamDebug metadata translateFromHalMetadata zzHDR
6851 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6852 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6853 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6854 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6855 }
6856 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6857 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006858 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006859 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6860 }
6861 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6862 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6863 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6864 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6865 }
6866 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6867 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006868 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006869 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6870 }
6871 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6872 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6873 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6874 *DevCamDebug_aec_hdr_sensitivity_ratio;
6875 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6876 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6877 }
6878 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6879 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6880 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6881 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6882 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6883 }
6884 // DevCamDebug metadata translateFromHalMetadata ADRC
6885 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6886 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6887 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6888 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6889 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6890 }
6891 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6892 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6893 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6894 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6895 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6896 }
6897 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6898 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6899 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6900 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6901 }
6902 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6903 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6904 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6905 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6906 }
6907 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6908 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6909 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6910 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6911 }
6912 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6913 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6914 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6915 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6916 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006917 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6918 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6919 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6920 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6921 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6922 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6923 }
6924 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6925 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6926 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6927 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6928 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6929 }
6930 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6931 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6932 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6933 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6934 &fwk_DevCamDebug_aec_subject_motion, 1);
6935 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006936 // DevCamDebug metadata translateFromHalMetadata AWB
6937 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6938 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6939 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6940 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6941 }
6942 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6943 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6944 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6945 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6946 }
6947 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6948 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6949 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6950 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6951 }
6952 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6953 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6954 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6955 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6956 }
6957 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6958 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6959 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6960 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6961 }
6962 }
6963 // atrace_end(ATRACE_TAG_ALWAYS);
6964
Thierry Strudel3d639192016-09-09 11:52:26 -07006965 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6966 int64_t fwk_frame_number = *frame_number;
6967 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6968 }
6969
6970 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6971 int32_t fps_range[2];
6972 fps_range[0] = (int32_t)float_range->min_fps;
6973 fps_range[1] = (int32_t)float_range->max_fps;
6974 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6975 fps_range, 2);
6976 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6977 fps_range[0], fps_range[1]);
6978 }
6979
6980 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6981 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6982 }
6983
6984 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
 6985        int val = lookupFwkName(SCENE_MODES_MAP,
6986 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6987 *sceneMode);
6988 if (NAME_NOT_FOUND != val) {
6989 uint8_t fwkSceneMode = (uint8_t)val;
6990 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6991 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6992 fwkSceneMode);
6993 }
6994 }
6995
6996 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6997 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6998 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6999 }
7000
7001 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7002 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7003 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7004 }
7005
7006 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7007 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7008 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7009 }
7010
7011 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7012 CAM_INTF_META_EDGE_MODE, metadata) {
7013 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7014 }
7015
7016 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7017 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7018 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7019 }
7020
7021 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7022 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7023 }
7024
7025 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7026 if (0 <= *flashState) {
7027 uint8_t fwk_flashState = (uint8_t) *flashState;
7028 if (!gCamCapability[mCameraId]->flash_available) {
7029 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7030 }
7031 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7032 }
7033 }
7034
7035 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7036 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7037 if (NAME_NOT_FOUND != val) {
7038 uint8_t fwk_flashMode = (uint8_t)val;
7039 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7040 }
7041 }
7042
7043 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7044 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7045 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7046 }
7047
7048 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7049 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7050 }
7051
7052 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7053 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7054 }
7055
7056 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7057 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7058 }
7059
7060 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7061 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7062 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7063 }
7064
7065 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7066 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7067 LOGD("fwk_videoStab = %d", fwk_videoStab);
7068 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7069 } else {
 7070        // Regardless of whether video stabilization is supported, CTS expects the EIS result
 7071        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7072 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7073 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007074 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007075 }
7076
7077 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7078 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7079 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7080 }
7081
7082 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7083 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7084 }
7085
Thierry Strudel3d639192016-09-09 11:52:26 -07007086 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7087 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007088 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007089
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007090 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7091 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007092
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007093 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007094 blackLevelAppliedPattern->cam_black_level[0],
7095 blackLevelAppliedPattern->cam_black_level[1],
7096 blackLevelAppliedPattern->cam_black_level[2],
7097 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007098 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7099 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007100
7101#ifndef USE_HAL_3_3
7102 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307103        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007104        // depth space.
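        // Going from 14-bit to 10-bit drops 4 bits of precision,
        // i.e. a divide by 2^4 = 16.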
Jason Lee4f3d96e2017-02-28 19:24:14 +05307105 fwk_blackLevelInd[0] /= 16.0;
7106 fwk_blackLevelInd[1] /= 16.0;
7107 fwk_blackLevelInd[2] /= 16.0;
7108 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007109 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7110 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007111#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007112 }
7113
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007114#ifndef USE_HAL_3_3
7115 // Fixed whitelevel is used by ISP/Sensor
7116 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7117 &gCamCapability[mCameraId]->white_level, 1);
7118#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007119
7120 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7121 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7122 int32_t scalerCropRegion[4];
7123 scalerCropRegion[0] = hScalerCropRegion->left;
7124 scalerCropRegion[1] = hScalerCropRegion->top;
7125 scalerCropRegion[2] = hScalerCropRegion->width;
7126 scalerCropRegion[3] = hScalerCropRegion->height;
7127
7128 // Adjust crop region from sensor output coordinate system to active
7129 // array coordinate system.
7130 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7131 scalerCropRegion[2], scalerCropRegion[3]);
7132
7133 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7134 }
7135
7136 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7137 LOGD("sensorExpTime = %lld", *sensorExpTime);
7138 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7139 }
7140
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007141 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7142 LOGD("expTimeBoost = %f", *expTimeBoost);
7143 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7144 }
7145
Thierry Strudel3d639192016-09-09 11:52:26 -07007146 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7147 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7148 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7149 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7150 }
7151
7152 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7153 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7154 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7155 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7156 sensorRollingShutterSkew, 1);
7157 }
7158
7159 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7160 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7161 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7162
 7163        // Calculate the noise profile based on sensitivity
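        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) pair per color
        // channel for the linear noise model variance = S * signal + O, so the
        // same pair is replicated across all channels below.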
7164 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7165 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7166 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7167 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7168 noise_profile[i] = noise_profile_S;
7169 noise_profile[i+1] = noise_profile_O;
7170 }
7171 LOGD("noise model entry (S, O) is (%f, %f)",
7172 noise_profile_S, noise_profile_O);
7173 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7174 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7175 }
7176
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007177#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007178 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007179 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007180 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007181 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007182 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7183 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7184 }
7185 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007186#endif
7187
Thierry Strudel3d639192016-09-09 11:52:26 -07007188 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7189 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7190 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7191 }
7192
7193 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7194 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7195 *faceDetectMode);
7196 if (NAME_NOT_FOUND != val) {
7197 uint8_t fwk_faceDetectMode = (uint8_t)val;
7198 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7199
7200 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7201 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7202 CAM_INTF_META_FACE_DETECTION, metadata) {
7203 uint8_t numFaces = MIN(
7204 faceDetectionInfo->num_faces_detected, MAX_ROI);
7205 int32_t faceIds[MAX_ROI];
7206 uint8_t faceScores[MAX_ROI];
7207 int32_t faceRectangles[MAX_ROI * 4];
7208 int32_t faceLandmarks[MAX_ROI * 6];
7209 size_t j = 0, k = 0;
7210
7211 for (size_t i = 0; i < numFaces; i++) {
7212 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
 7213                    // Adjust face boundary from sensor output coordinate system to active
 7214                    // array coordinate system.
7215 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7216 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7217 rect.width, rect.height);
7218
7219 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7220 faceRectangles+j, -1);
7221
Jason Lee8ce36fa2017-04-19 19:40:37 -07007222 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7223 "bottom-right (%d, %d)",
7224 faceDetectionInfo->frame_id, i,
7225 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7226 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7227
Thierry Strudel3d639192016-09-09 11:52:26 -07007228 j+= 4;
7229 }
7230 if (numFaces <= 0) {
7231 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7232 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7233 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7234 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7235 }
7236
7237 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7238 numFaces);
7239 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7240 faceRectangles, numFaces * 4U);
7241 if (fwk_faceDetectMode ==
7242 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7243 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7244 CAM_INTF_META_FACE_LANDMARK, metadata) {
7245
7246 for (size_t i = 0; i < numFaces; i++) {
 7247                            // Map the landmark coordinates from sensor output coordinate
 7248                            // system to active array coordinate system.
7249 mCropRegionMapper.toActiveArray(
7250 landmarks->face_landmarks[i].left_eye_center.x,
7251 landmarks->face_landmarks[i].left_eye_center.y);
7252 mCropRegionMapper.toActiveArray(
7253 landmarks->face_landmarks[i].right_eye_center.x,
7254 landmarks->face_landmarks[i].right_eye_center.y);
7255 mCropRegionMapper.toActiveArray(
7256 landmarks->face_landmarks[i].mouth_center.x,
7257 landmarks->face_landmarks[i].mouth_center.y);
7258
7259 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007260
7261 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7262 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7263 faceDetectionInfo->frame_id, i,
7264 faceLandmarks[k + LEFT_EYE_X],
7265 faceLandmarks[k + LEFT_EYE_Y],
7266 faceLandmarks[k + RIGHT_EYE_X],
7267 faceLandmarks[k + RIGHT_EYE_Y],
7268 faceLandmarks[k + MOUTH_X],
7269 faceLandmarks[k + MOUTH_Y]);
7270
Thierry Strudel04e026f2016-10-10 11:27:36 -07007271 k+= TOTAL_LANDMARK_INDICES;
7272 }
7273 } else {
7274 for (size_t i = 0; i < numFaces; i++) {
7275 setInvalidLandmarks(faceLandmarks+k);
7276 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007277 }
7278 }
7279
Jason Lee49619db2017-04-13 12:07:22 -07007280 for (size_t i = 0; i < numFaces; i++) {
7281 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7282
7283 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7284 faceDetectionInfo->frame_id, i, faceIds[i]);
7285 }
7286
Thierry Strudel3d639192016-09-09 11:52:26 -07007287 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7288 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7289 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007290 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007291 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7292 CAM_INTF_META_FACE_BLINK, metadata) {
7293 uint8_t detected[MAX_ROI];
7294 uint8_t degree[MAX_ROI * 2];
7295 for (size_t i = 0; i < numFaces; i++) {
7296 detected[i] = blinks->blink[i].blink_detected;
7297 degree[2 * i] = blinks->blink[i].left_blink;
7298 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007299
Jason Lee49619db2017-04-13 12:07:22 -07007300 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7301 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7302 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7303 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007304 }
7305 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7306 detected, numFaces);
7307 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7308 degree, numFaces * 2);
7309 }
7310 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7311 CAM_INTF_META_FACE_SMILE, metadata) {
7312 uint8_t degree[MAX_ROI];
7313 uint8_t confidence[MAX_ROI];
7314 for (size_t i = 0; i < numFaces; i++) {
7315 degree[i] = smiles->smile[i].smile_degree;
7316 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007317
Jason Lee49619db2017-04-13 12:07:22 -07007318 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7319 "smile_degree=%d, smile_score=%d",
7320 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007321 }
7322 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7323 degree, numFaces);
7324 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7325 confidence, numFaces);
7326 }
7327 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7328 CAM_INTF_META_FACE_GAZE, metadata) {
7329 int8_t angle[MAX_ROI];
7330 int32_t direction[MAX_ROI * 3];
7331 int8_t degree[MAX_ROI * 2];
7332 for (size_t i = 0; i < numFaces; i++) {
7333 angle[i] = gazes->gaze[i].gaze_angle;
7334 direction[3 * i] = gazes->gaze[i].updown_dir;
7335 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7336 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7337 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7338 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007339
7340 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7341 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7342 "left_right_gaze=%d, top_bottom_gaze=%d",
7343 faceDetectionInfo->frame_id, i, angle[i],
7344 direction[3 * i], direction[3 * i + 1],
7345 direction[3 * i + 2],
7346 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007347 }
7348 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7349 (uint8_t *)angle, numFaces);
7350 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7351 direction, numFaces * 3);
7352 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7353 (uint8_t *)degree, numFaces * 2);
7354 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007355 }
7356 }
7357 }
7358 }
7359
7360 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7361 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007362 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007363 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007364 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007365
Shuzhen Wang14415f52016-11-16 18:26:18 -08007366 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7367 histogramBins = *histBins;
7368 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7369 }
7370
7371 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007372 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7373 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007374 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007375
7376 switch (stats_data->type) {
7377 case CAM_HISTOGRAM_TYPE_BAYER:
7378 switch (stats_data->bayer_stats.data_type) {
7379 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007380 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7381 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007382 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007383 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7384 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007385 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007386 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7387 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007388 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007389 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007390 case CAM_STATS_CHANNEL_R:
7391 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007392 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7393 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007394 }
7395 break;
7396 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007397 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007398 break;
7399 }
7400
Shuzhen Wang14415f52016-11-16 18:26:18 -08007401 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007402 }
7403 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007404 }
7405
7406 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7407 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7408 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7409 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7410 }
7411
7412 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7413 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7414 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7415 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7416 }
7417
7418 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7419 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7420 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7421 CAM_MAX_SHADING_MAP_HEIGHT);
7422 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7423 CAM_MAX_SHADING_MAP_WIDTH);
7424 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7425 lensShadingMap->lens_shading, 4U * map_width * map_height);
7426 }
7427
7428 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7429 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7430 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7431 }
7432
7433 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7434 //Populate CAM_INTF_META_TONEMAP_CURVES
7435 /* ch0 = G, ch 1 = B, ch 2 = R*/
7436 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7437 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7438 tonemap->tonemap_points_cnt,
7439 CAM_MAX_TONEMAP_CURVE_SIZE);
7440 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7441 }
7442
7443 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7444 &tonemap->curves[0].tonemap_points[0][0],
7445 tonemap->tonemap_points_cnt * 2);
7446
7447 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7448 &tonemap->curves[1].tonemap_points[0][0],
7449 tonemap->tonemap_points_cnt * 2);
7450
7451 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7452 &tonemap->curves[2].tonemap_points[0][0],
7453 tonemap->tonemap_points_cnt * 2);
7454 }
7455
7456 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7457 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7458 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7459 CC_GAIN_MAX);
7460 }
7461
7462 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7463 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7464 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7465 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7466 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7467 }
7468
7469 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7470 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7471 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7472 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7473 toneCurve->tonemap_points_cnt,
7474 CAM_MAX_TONEMAP_CURVE_SIZE);
7475 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7476 }
7477 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7478 (float*)toneCurve->curve.tonemap_points,
7479 toneCurve->tonemap_points_cnt * 2);
7480 }
7481
7482 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7483 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7484 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7485 predColorCorrectionGains->gains, 4);
7486 }
7487
7488 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7489 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7490 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7491 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7492 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7493 }
7494
7495 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7496 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7497 }
7498
7499 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7500 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7501 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7502 }
7503
7504 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7505 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7506 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7507 }
7508
7509 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7510 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7511 *effectMode);
7512 if (NAME_NOT_FOUND != val) {
7513 uint8_t fwk_effectMode = (uint8_t)val;
7514 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7515 }
7516 }
7517
7518 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7519 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7520 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7521 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7522 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7523 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7524 }
7525 int32_t fwk_testPatternData[4];
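        // ANDROID_SENSOR_TEST_PATTERN_DATA is ordered [R, G_even, G_odd, B];
        // which of the HAL's gr/gb values maps to the even- vs odd-row green
        // slot depends on the sensor's color filter arrangement handled below.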
7526 fwk_testPatternData[0] = testPatternData->r;
7527 fwk_testPatternData[3] = testPatternData->b;
7528 switch (gCamCapability[mCameraId]->color_arrangement) {
7529 case CAM_FILTER_ARRANGEMENT_RGGB:
7530 case CAM_FILTER_ARRANGEMENT_GRBG:
7531 fwk_testPatternData[1] = testPatternData->gr;
7532 fwk_testPatternData[2] = testPatternData->gb;
7533 break;
7534 case CAM_FILTER_ARRANGEMENT_GBRG:
7535 case CAM_FILTER_ARRANGEMENT_BGGR:
7536 fwk_testPatternData[2] = testPatternData->gr;
7537 fwk_testPatternData[1] = testPatternData->gb;
7538 break;
7539 default:
7540 LOGE("color arrangement %d is not supported",
7541 gCamCapability[mCameraId]->color_arrangement);
7542 break;
7543 }
7544 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7545 }
7546
7547 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7548 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7549 }
7550
7551 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7552 String8 str((const char *)gps_methods);
7553 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7554 }
7555
7556 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7557 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7558 }
7559
7560 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7561 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7562 }
7563
7564 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7565 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7566 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7567 }
7568
7569 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7570 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7571 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7572 }
7573
7574 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7575 int32_t fwk_thumb_size[2];
7576 fwk_thumb_size[0] = thumb_size->width;
7577 fwk_thumb_size[1] = thumb_size->height;
7578 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7579 }
7580
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007581 // Skip reprocess metadata if there is no input stream.
7582 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7583 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7584 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7585 privateData,
7586 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7587 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007588 }
7589
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007590 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007591 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007592 meteringMode, 1);
7593 }
7594
Thierry Strudel54dc9782017-02-15 12:12:10 -08007595 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7596 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7597 LOGD("hdr_scene_data: %d %f\n",
7598 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7599 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7600 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7601 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7602 &isHdr, 1);
7603 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7604 &isHdrConfidence, 1);
7605 }
7606
7607
7608
Thierry Strudel3d639192016-09-09 11:52:26 -07007609 if (metadata->is_tuning_params_valid) {
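        // Blob layout: six uint32_t header words (tuning data version followed
        // by the sensor/VFE/CPP/CAC/mod3 section sizes), then the sensor, VFE,
        // CPP and CAC payloads back to back, each clamped to its
        // TUNING_*_DATA_MAX limit below.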
7610 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7611 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7612 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7613
7614
7615 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7616 sizeof(uint32_t));
7617 data += sizeof(uint32_t);
7618
7619 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7620 sizeof(uint32_t));
7621 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7622 data += sizeof(uint32_t);
7623
7624 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7625 sizeof(uint32_t));
7626 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7627 data += sizeof(uint32_t);
7628
7629 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7630 sizeof(uint32_t));
7631 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7632 data += sizeof(uint32_t);
7633
7634 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7635 sizeof(uint32_t));
7636 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7637 data += sizeof(uint32_t);
7638
7639 metadata->tuning_params.tuning_mod3_data_size = 0;
7640 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7641 sizeof(uint32_t));
7642 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7643 data += sizeof(uint32_t);
7644
7645 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7646 TUNING_SENSOR_DATA_MAX);
7647 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7648 count);
7649 data += count;
7650
7651 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7652 TUNING_VFE_DATA_MAX);
7653 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7654 count);
7655 data += count;
7656
7657 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7658 TUNING_CPP_DATA_MAX);
7659 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7660 count);
7661 data += count;
7662
7663 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7664 TUNING_CAC_DATA_MAX);
7665 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7666 count);
7667 data += count;
7668
7669 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7670 (int32_t *)(void *)tuning_meta_data_blob,
7671 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7672 }
7673
7674 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7675 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7676 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7677 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7678 NEUTRAL_COL_POINTS);
7679 }
7680
7681 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7682 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7683 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7684 }
7685
7686 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7687 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7688 // Adjust crop region from sensor output coordinate system to active
7689 // array coordinate system.
7690 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7691 hAeRegions->rect.width, hAeRegions->rect.height);
7692
7693 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7694 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7695 REGIONS_TUPLE_COUNT);
7696 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7697 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7698 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7699 hAeRegions->rect.height);
7700 }
7701
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007702 if (!pendingRequest.focusStateSent) {
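    // AF state was not already sent as an early partial result for this request.
    // Report either the state cached by the urgent metadata path (see
    // translateCbUrgentMetadataToResultMetadata) or the state in this buffer.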
7703 if (pendingRequest.focusStateValid) {
7704 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7705 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007706 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007707 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7708 uint8_t fwk_afState = (uint8_t) *afState;
7709 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7710 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7711 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007712 }
7713 }
7714
Thierry Strudel3d639192016-09-09 11:52:26 -07007715 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7716 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7717 }
7718
7719 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7720 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7721 }
7722
7723 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7724 uint8_t fwk_lensState = *lensState;
7725 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7726 }
7727
Thierry Strudel3d639192016-09-09 11:52:26 -07007728 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007729 uint32_t ab_mode = *hal_ab_mode;
7730 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7731 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7732 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7733 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007734 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007735 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007736 if (NAME_NOT_FOUND != val) {
7737 uint8_t fwk_ab_mode = (uint8_t)val;
7738 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7739 }
7740 }
7741
7742 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7743 int val = lookupFwkName(SCENE_MODES_MAP,
7744 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7745 if (NAME_NOT_FOUND != val) {
7746 uint8_t fwkBestshotMode = (uint8_t)val;
7747 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7748 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7749 } else {
7750 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7751 }
7752 }
7753
7754 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7755 uint8_t fwk_mode = (uint8_t) *mode;
7756 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7757 }
7758
 7759    /* Constant metadata values to be updated */
7760 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7761 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7762
7763 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7764 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7765
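    // Hot pixel map mode is always reported as OFF above, so publish the hot
    // pixel map tag with zero entries (presumably just so the tag is present).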
7766 int32_t hotPixelMap[2];
7767 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7768
7769 // CDS
7770 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7771 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7772 }
7773
Thierry Strudel04e026f2016-10-10 11:27:36 -07007774 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7775 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007776 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
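        // mCurrFeatureState tracks the staggered video HDR bit so a toggle can
        // be logged once per transition below.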
Thierry Strudel04e026f2016-10-10 11:27:36 -07007777 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7778 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7779 } else {
7780 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7781 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007782
7783 if(fwk_hdr != curr_hdr_state) {
7784 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7785 if(fwk_hdr)
7786 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7787 else
7788 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7789 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007790 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7791 }
7792
Thierry Strudel54dc9782017-02-15 12:12:10 -08007793 //binning correction
7794 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7795 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7796 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7797 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7798 }
7799
Thierry Strudel04e026f2016-10-10 11:27:36 -07007800 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007801 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007802 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7803 int8_t is_ir_on = 0;
7804
7805 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7806 if(is_ir_on != curr_ir_state) {
7807 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7808 if(is_ir_on)
7809 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7810 else
7811 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7812 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007813 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007814 }
7815
Thierry Strudel269c81a2016-10-12 12:13:59 -07007816 // AEC SPEED
7817 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7818 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7819 }
7820
7821 // AWB SPEED
7822 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7823 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7824 }
7825
Thierry Strudel3d639192016-09-09 11:52:26 -07007826 // TNR
7827 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7828 uint8_t tnr_enable = tnr->denoise_enable;
7829 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007830 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7831 int8_t is_tnr_on = 0;
7832
7833 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7834 if(is_tnr_on != curr_tnr_state) {
7835 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7836 if(is_tnr_on)
7837 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7838 else
7839 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7840 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007841
7842 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7843 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7844 }
7845
7846 // Reprocess crop data
7847 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7848 uint8_t cnt = crop_data->num_of_streams;
7849 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7850 // mm-qcamera-daemon only posts crop_data for streams
 7851            // not linked to pproc. So the absence of valid crop metadata is
 7852            // not necessarily an error case.
7853 LOGD("No valid crop metadata entries");
7854 } else {
7855 uint32_t reproc_stream_id;
7856 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7857 LOGD("No reprocessible stream found, ignore crop data");
7858 } else {
7859 int rc = NO_ERROR;
7860 Vector<int32_t> roi_map;
7861 int32_t *crop = new int32_t[cnt*4];
7862 if (NULL == crop) {
7863 rc = NO_MEMORY;
7864 }
7865 if (NO_ERROR == rc) {
7866 int32_t streams_found = 0;
7867 for (size_t i = 0; i < cnt; i++) {
7868 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7869 if (pprocDone) {
7870 // HAL already does internal reprocessing,
7871 // either via reprocessing before JPEG encoding,
7872 // or offline postprocessing for pproc bypass case.
7873 crop[0] = 0;
7874 crop[1] = 0;
7875 crop[2] = mInputStreamInfo.dim.width;
7876 crop[3] = mInputStreamInfo.dim.height;
7877 } else {
7878 crop[0] = crop_data->crop_info[i].crop.left;
7879 crop[1] = crop_data->crop_info[i].crop.top;
7880 crop[2] = crop_data->crop_info[i].crop.width;
7881 crop[3] = crop_data->crop_info[i].crop.height;
7882 }
7883 roi_map.add(crop_data->crop_info[i].roi_map.left);
7884 roi_map.add(crop_data->crop_info[i].roi_map.top);
7885 roi_map.add(crop_data->crop_info[i].roi_map.width);
7886 roi_map.add(crop_data->crop_info[i].roi_map.height);
7887 streams_found++;
7888 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7889 crop[0], crop[1], crop[2], crop[3]);
7890 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7891 crop_data->crop_info[i].roi_map.left,
7892 crop_data->crop_info[i].roi_map.top,
7893 crop_data->crop_info[i].roi_map.width,
7894 crop_data->crop_info[i].roi_map.height);
7895 break;
7896
7897 }
7898 }
7899 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7900 &streams_found, 1);
7901 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7902 crop, (size_t)(streams_found * 4));
7903 if (roi_map.array()) {
7904 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7905 roi_map.array(), roi_map.size());
7906 }
7907 }
7908 if (crop) {
7909 delete [] crop;
7910 }
7911 }
7912 }
7913 }
7914
7915 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
 7916        // Regardless of whether CAC is supported, CTS expects the CAC result to be
 7917        // non-NULL, so hardcode the CAC result to OFF mode.
7918 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7919 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7920 } else {
7921 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7922 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7923 *cacMode);
7924 if (NAME_NOT_FOUND != val) {
7925 uint8_t resultCacMode = (uint8_t)val;
7926 // check whether CAC result from CB is equal to Framework set CAC mode
 7927                // If not, report the CAC mode that came in the corresponding request
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007928 if (pendingRequest.fwkCacMode != resultCacMode) {
7929 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007930 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007931 //Check if CAC is disabled by property
7932 if (m_cacModeDisabled) {
7933 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7934 }
7935
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007936 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007937 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7938 } else {
7939 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7940 }
7941 }
7942 }
7943
7944 // Post blob of cam_cds_data through vendor tag.
7945 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7946 uint8_t cnt = cdsInfo->num_of_streams;
7947 cam_cds_data_t cdsDataOverride;
7948 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7949 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7950 cdsDataOverride.num_of_streams = 1;
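        // Only the reprocessible stream's CDS enable flag is forwarded; the blob
        // posted through the vendor tag is reduced to a single-stream entry.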
7951 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7952 uint32_t reproc_stream_id;
7953 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7954 LOGD("No reprocessible stream found, ignore cds data");
7955 } else {
7956 for (size_t i = 0; i < cnt; i++) {
7957 if (cdsInfo->cds_info[i].stream_id ==
7958 reproc_stream_id) {
7959 cdsDataOverride.cds_info[0].cds_enable =
7960 cdsInfo->cds_info[i].cds_enable;
7961 break;
7962 }
7963 }
7964 }
7965 } else {
7966 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7967 }
7968 camMetadata.update(QCAMERA3_CDS_INFO,
7969 (uint8_t *)&cdsDataOverride,
7970 sizeof(cam_cds_data_t));
7971 }
7972
7973 // Ldaf calibration data
7974 if (!mLdafCalibExist) {
7975 IF_META_AVAILABLE(uint32_t, ldafCalib,
7976 CAM_INTF_META_LDAF_EXIF, metadata) {
7977 mLdafCalibExist = true;
7978 mLdafCalib[0] = ldafCalib[0];
7979 mLdafCalib[1] = ldafCalib[1];
7980 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7981 ldafCalib[0], ldafCalib[1]);
7982 }
7983 }
7984
Thierry Strudel54dc9782017-02-15 12:12:10 -08007985 // EXIF debug data through vendor tag
7986 /*
7987 * Mobicat Mask can assume 3 values:
7988 * 1 refers to Mobicat data,
7989 * 2 refers to Stats Debug and Exif Debug Data
7990 * 3 refers to Mobicat and Stats Debug Data
7991 * We want to make sure that we are sending Exif debug data
7992 * only when Mobicat Mask is 2.
7993 */
7994 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7995 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7996 (uint8_t *)(void *)mExifParams.debug_params,
7997 sizeof(mm_jpeg_debug_exif_params_t));
7998 }
7999
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008000 // Reprocess and DDM debug data through vendor tag
8001 cam_reprocess_info_t repro_info;
8002 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008003 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8004 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008005 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008006 }
8007 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8008 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008009 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008010 }
8011 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8012 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008013 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008014 }
8015 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8016 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008017 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008018 }
8019 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8020 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008021 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008022 }
8023 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008024 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008025 }
8026 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8027 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008028 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008029 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008030 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8031 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8032 }
8033 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8034 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8035 }
8036 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8037 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008038
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008039 // INSTANT AEC MODE
8040 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8041 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8042 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8043 }
8044
Shuzhen Wange763e802016-03-31 10:24:29 -07008045 // AF scene change
8046 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8047 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8048 }
8049
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008050 // Enable ZSL
8051 if (enableZsl != nullptr) {
8052 uint8_t value = *enableZsl ?
8053 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8054 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8055 }
8056
Xu Han821ea9c2017-05-23 09:00:40 -07008057 // OIS Data
8058 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8059 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8060 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8061 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8062 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8063 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8064 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8065 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8066 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8067 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8068 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8069 }
8070
Thierry Strudel3d639192016-09-09 11:52:26 -07008071 resultMetadata = camMetadata.release();
8072 return resultMetadata;
8073}
8074
8075/*===========================================================================
8076 * FUNCTION : saveExifParams
8077 *
 8078 * DESCRIPTION: Save per-module EXIF debug parameters from the metadata
 *              callback into mExifParams.debug_params
8079 *
8080 * PARAMETERS :
8081 * @metadata : metadata information from callback
8082 *
8083 * RETURN : none
8084 *
8085 *==========================================================================*/
8086void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8087{
8088 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8089 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8090 if (mExifParams.debug_params) {
8091 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8092 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8093 }
8094 }
8095 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8096 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8097 if (mExifParams.debug_params) {
8098 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8099 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8100 }
8101 }
8102 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8103 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8104 if (mExifParams.debug_params) {
8105 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8106 mExifParams.debug_params->af_debug_params_valid = TRUE;
8107 }
8108 }
8109 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8110 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8111 if (mExifParams.debug_params) {
8112 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8113 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8114 }
8115 }
8116 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8117 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8118 if (mExifParams.debug_params) {
8119 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8120 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8121 }
8122 }
8123 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8124 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8125 if (mExifParams.debug_params) {
8126 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8127 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8128 }
8129 }
8130 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8131 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8132 if (mExifParams.debug_params) {
8133 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8134 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8135 }
8136 }
8137 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8138 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8139 if (mExifParams.debug_params) {
8140 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8141 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8142 }
8143 }
8144}
8145
8146/*===========================================================================
8147 * FUNCTION : get3AExifParams
8148 *
 8149 * DESCRIPTION: Get the cached 3A EXIF parameters used for JPEG encoding
8150 *
8151 * PARAMETERS : none
8152 *
8153 *
8154 * RETURN : mm_jpeg_exif_params_t
8155 *
8156 *==========================================================================*/
8157mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8158{
8159 return mExifParams;
8160}
8161
8162/*===========================================================================
8163 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8164 *
 8165 * DESCRIPTION: Translate urgent metadata from the HAL callback into a
 *              framework partial result metadata buffer
8166 *
8167 * PARAMETERS :
8168 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008169 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8170 * urgent metadata in a batch. Always true for
8171 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008172 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008173 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8174 * i.e. even though it doesn't map to a valid partial
8175 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008176 * RETURN : camera_metadata_t*
8177 * metadata in a format specified by fwk
8178 *==========================================================================*/
8179camera_metadata_t*
8180QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008181 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008182 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008183{
8184 CameraMetadata camMetadata;
8185 camera_metadata_t *resultMetadata;
8186
Shuzhen Wang485e2442017-08-02 12:21:08 -07008187 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008188 /* In batch mode, use empty metadata if this is not the last in batch
8189 */
8190 resultMetadata = allocate_camera_metadata(0, 0);
8191 return resultMetadata;
8192 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008193
8194 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8195 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8196 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8197 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8198 }
8199
8200 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8201 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8202 &aecTrigger->trigger, 1);
8203 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8204 &aecTrigger->trigger_id, 1);
8205 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8206 aecTrigger->trigger);
8207 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8208 aecTrigger->trigger_id);
8209 }
8210
8211 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8212 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8213 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8214 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8215 }
8216
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008217 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8218 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8219 if (NAME_NOT_FOUND != val) {
8220 uint8_t fwkAfMode = (uint8_t)val;
8221 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8222 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8223 } else {
8224 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8225 val);
8226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008227 }
8228
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008229 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8230 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8231 af_trigger->trigger);
8232 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8233 af_trigger->trigger_id);
8234
8235 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8236 mAfTrigger = *af_trigger;
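            // mAfTrigger caches the most recent trigger so the AF trigger and
            // trigger id can still be reported below even when a later metadata
            // buffer does not carry CAM_INTF_META_AF_TRIGGER.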
8237 uint32_t fwk_AfState = (uint32_t) *afState;
8238
8239 // If this is the result for a new trigger, check if there is new early
8240 // af state. If there is, use the last af state for all results
8241 // preceding current partial frame number.
8242 for (auto & pendingRequest : mPendingRequestsList) {
8243 if (pendingRequest.frame_number < frame_number) {
8244 pendingRequest.focusStateValid = true;
8245 pendingRequest.focusState = fwk_AfState;
8246 } else if (pendingRequest.frame_number == frame_number) {
8247 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8248 // Check if early AF state for trigger exists. If yes, send AF state as
8249 // partial result for better latency.
8250 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8251 pendingRequest.focusStateSent = true;
8252 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8253 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8254 frame_number, fwkEarlyAfState);
8255 }
8256 }
8257 }
8258 }
8259 }
8260 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8261 &mAfTrigger.trigger, 1);
8262 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8263
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008264 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8265 /*af regions*/
8266 int32_t afRegions[REGIONS_TUPLE_COUNT];
8267 // Adjust crop region from sensor output coordinate system to active
8268 // array coordinate system.
8269 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8270 hAfRegions->rect.width, hAfRegions->rect.height);
8271
8272 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8273 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8274 REGIONS_TUPLE_COUNT);
8275 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8276 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8277 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8278 hAfRegions->rect.height);
8279 }
8280
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008281 // AF region confidence
8282 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8283 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8284 }
8285
Thierry Strudel3d639192016-09-09 11:52:26 -07008286 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8287 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8288 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8289 if (NAME_NOT_FOUND != val) {
8290 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8291 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8292 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8293 } else {
8294 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8295 }
8296 }
8297
8298 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8299 uint32_t aeMode = CAM_AE_MODE_MAX;
8300 int32_t flashMode = CAM_FLASH_MODE_MAX;
8301 int32_t redeye = -1;
8302 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8303 aeMode = *pAeMode;
8304 }
8305 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8306 flashMode = *pFlashMode;
8307 }
8308 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8309 redeye = *pRedeye;
8310 }
8311
8312 if (1 == redeye) {
8313 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8314 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8315 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8316 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8317 flashMode);
8318 if (NAME_NOT_FOUND != val) {
8319 fwk_aeMode = (uint8_t)val;
8320 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8321 } else {
8322 LOGE("Unsupported flash mode %d", flashMode);
8323 }
8324 } else if (aeMode == CAM_AE_MODE_ON) {
8325 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8326 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8327 } else if (aeMode == CAM_AE_MODE_OFF) {
8328 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8329 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008330 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8331 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8332 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008333 } else {
8334 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8335 "flashMode:%d, aeMode:%u!!!",
8336 redeye, flashMode, aeMode);
8337 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008338 if (mInstantAEC) {
 8339        // Increment frame index count until a bound is reached for instant AEC.
8340 mInstantAecFrameIdxCount++;
8341 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8342 CAM_INTF_META_AEC_INFO, metadata) {
8343 LOGH("ae_params->settled = %d",ae_params->settled);
8344 // If AEC settled, or if number of frames reached bound value,
8345 // should reset instant AEC.
8346 if (ae_params->settled ||
8347 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8348 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8349 mInstantAEC = false;
8350 mResetInstantAEC = true;
8351 mInstantAecFrameIdxCount = 0;
8352 }
8353 }
8354 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008355 resultMetadata = camMetadata.release();
8356 return resultMetadata;
8357}
8358
8359/*===========================================================================
8360 * FUNCTION : dumpMetadataToFile
8361 *
8362 * DESCRIPTION: Dumps tuning metadata to file system
8363 *
8364 * PARAMETERS :
8365 * @meta : tuning metadata
8366 * @dumpFrameCount : current dump frame count
 8367 * @enabled : whether tuning metadata dumping is enabled
 * @type : dump type tag used in the output file name
 * @frameNumber : frame number used in the output file name
 8368 *
8369 *==========================================================================*/
8370void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8371 uint32_t &dumpFrameCount,
8372 bool enabled,
8373 const char *type,
8374 uint32_t frameNumber)
8375{
8376 //Some sanity checks
8377 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8378 LOGE("Tuning sensor data size bigger than expected %d: %d",
8379 meta.tuning_sensor_data_size,
8380 TUNING_SENSOR_DATA_MAX);
8381 return;
8382 }
8383
8384 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8385 LOGE("Tuning VFE data size bigger than expected %d: %d",
8386 meta.tuning_vfe_data_size,
8387 TUNING_VFE_DATA_MAX);
8388 return;
8389 }
8390
8391 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8392 LOGE("Tuning CPP data size bigger than expected %d: %d",
8393 meta.tuning_cpp_data_size,
8394 TUNING_CPP_DATA_MAX);
8395 return;
8396 }
8397
8398 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8399 LOGE("Tuning CAC data size bigger than expected %d: %d",
8400 meta.tuning_cac_data_size,
8401 TUNING_CAC_DATA_MAX);
8402 return;
8403 }
8404 //
8405
8406 if(enabled){
8407 char timeBuf[FILENAME_MAX];
8408 char buf[FILENAME_MAX];
8409 memset(buf, 0, sizeof(buf));
8410 memset(timeBuf, 0, sizeof(timeBuf));
8411 time_t current_time;
8412 struct tm * timeinfo;
8413 time (&current_time);
8414 timeinfo = localtime (&current_time);
8415 if (timeinfo != NULL) {
8416 strftime (timeBuf, sizeof(timeBuf),
8417 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8418 }
8419 String8 filePath(timeBuf);
8420 snprintf(buf,
8421 sizeof(buf),
8422 "%dm_%s_%d.bin",
8423 dumpFrameCount,
8424 type,
8425 frameNumber);
8426 filePath.append(buf);
8427 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8428 if (file_fd >= 0) {
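            // The dump file mirrors the tuning blob layout: six uint32_t header
            // fields (data version and the per-module section sizes, with the
            // mod3 size forced to 0), followed by the sensor, VFE, CPP and CAC
            // payloads read from meta.data at their respective offsets.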
8429 ssize_t written_len = 0;
8430 meta.tuning_data_version = TUNING_DATA_VERSION;
8431 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8432 written_len += write(file_fd, data, sizeof(uint32_t));
8433 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8434 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8435 written_len += write(file_fd, data, sizeof(uint32_t));
8436 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8437 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8438 written_len += write(file_fd, data, sizeof(uint32_t));
8439 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8440 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8441 written_len += write(file_fd, data, sizeof(uint32_t));
8442 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8443 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8444 written_len += write(file_fd, data, sizeof(uint32_t));
8445 meta.tuning_mod3_data_size = 0;
8446 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8447 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8448 written_len += write(file_fd, data, sizeof(uint32_t));
8449 size_t total_size = meta.tuning_sensor_data_size;
8450 data = (void *)((uint8_t *)&meta.data);
8451 written_len += write(file_fd, data, total_size);
8452 total_size = meta.tuning_vfe_data_size;
8453 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8454 written_len += write(file_fd, data, total_size);
8455 total_size = meta.tuning_cpp_data_size;
8456 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8457 written_len += write(file_fd, data, total_size);
8458 total_size = meta.tuning_cac_data_size;
8459 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8460 written_len += write(file_fd, data, total_size);
8461 close(file_fd);
8462 }else {
8463 LOGE("fail to open file for metadata dumping");
8464 }
8465 }
8466}
8467
8468/*===========================================================================
8469 * FUNCTION : cleanAndSortStreamInfo
8470 *
8471 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8472 * and sort them such that raw stream is at the end of the list
 8473 * This is a workaround for a camera daemon constraint.
8474 *
8475 * PARAMETERS : None
8476 *
8477 *==========================================================================*/
8478void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8479{
8480 List<stream_info_t *> newStreamInfo;
8481
8482 /*clean up invalid streams*/
8483 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8484 it != mStreamInfo.end();) {
8485 if(((*it)->status) == INVALID){
8486 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8487 delete channel;
8488 free(*it);
8489 it = mStreamInfo.erase(it);
8490 } else {
8491 it++;
8492 }
8493 }
8494
8495 // Move preview/video/callback/snapshot streams into newList
8496 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8497 it != mStreamInfo.end();) {
8498 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8499 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8500 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8501 newStreamInfo.push_back(*it);
8502 it = mStreamInfo.erase(it);
8503 } else
8504 it++;
8505 }
8506 // Move raw streams into newList
8507 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8508 it != mStreamInfo.end();) {
8509 newStreamInfo.push_back(*it);
8510 it = mStreamInfo.erase(it);
8511 }
8512
8513 mStreamInfo = newStreamInfo;
8514}
8515
8516/*===========================================================================
8517 * FUNCTION : extractJpegMetadata
8518 *
8519 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8520 * JPEG metadata is cached in HAL, and return as part of capture
8521 * result when metadata is returned from camera daemon.
8522 *
8523 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8524 * @request: capture request
8525 *
8526 *==========================================================================*/
8527void QCamera3HardwareInterface::extractJpegMetadata(
8528 CameraMetadata& jpegMetadata,
8529 const camera3_capture_request_t *request)
8530{
8531 CameraMetadata frame_settings;
8532 frame_settings = request->settings;
8533
8534 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8535 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8536 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8537 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8538
8539 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8540 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8541 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8542 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8543
8544 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8545 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8546 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8547 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8548
8549 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8550 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8551 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8552 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8553
8554 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8555 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8556 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8557 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8558
8559 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8560 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8561 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8562 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8563
8564 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8565 int32_t thumbnail_size[2];
8566 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8567 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8568 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8569 int32_t orientation =
8570 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008571 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008572 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8573 int32_t temp;
8574 temp = thumbnail_size[0];
8575 thumbnail_size[0] = thumbnail_size[1];
8576 thumbnail_size[1] = temp;
8577 }
8578 }
8579 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8580 thumbnail_size,
8581 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8582 }
8583
8584}
8585
8586/*===========================================================================
8587 * FUNCTION : convertToRegions
8588 *
8589 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8590 *
8591 * PARAMETERS :
8592 * @rect : cam_rect_t struct to convert
8593 * @region : int32_t destination array
8594 * @weight : if we are converting from cam_area_t, weight is valid
8595 * else weight = -1
8596 *
8597 *==========================================================================*/
8598void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8599 int32_t *region, int weight)
8600{
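    // The framework region tuple is (xmin, ymin, xmax, ymax, weight); the HAL
    // rect carries left/top/width/height, so right and bottom are derived here.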
Jason Lee8ce36fa2017-04-19 19:40:37 -07008601 region[FACE_LEFT] = rect.left;
8602 region[FACE_TOP] = rect.top;
8603 region[FACE_RIGHT] = rect.left + rect.width;
8604 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008605 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008606 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008607 }
8608}
8609
8610/*===========================================================================
8611 * FUNCTION : convertFromRegions
8612 *
8613 * DESCRIPTION: helper method to convert from array to cam_rect_t
8614 *
8615 * PARAMETERS :
8616 * @rect : cam_rect_t struct to convert
8617 * @region : int32_t destination array
8618 * @weight : if we are converting from cam_area_t, weight is valid
8619 * else weight = -1
8620 *
8621 *==========================================================================*/
8622void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008623 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008624{
Thierry Strudel3d639192016-09-09 11:52:26 -07008625 int32_t x_min = frame_settings.find(tag).data.i32[0];
8626 int32_t y_min = frame_settings.find(tag).data.i32[1];
8627 int32_t x_max = frame_settings.find(tag).data.i32[2];
8628 int32_t y_max = frame_settings.find(tag).data.i32[3];
8629 roi.weight = frame_settings.find(tag).data.i32[4];
8630 roi.rect.left = x_min;
8631 roi.rect.top = y_min;
8632 roi.rect.width = x_max - x_min;
8633 roi.rect.height = y_max - y_min;
8634}
8635
8636/*===========================================================================
8637 * FUNCTION : resetIfNeededROI
8638 *
8639 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8640 * crop region
8641 *
8642 * PARAMETERS :
8643 * @roi : cam_area_t struct to resize
8644 * @scalerCropRegion : cam_crop_region_t region to compare against
8645 *
8646 *
8647 *==========================================================================*/
8648bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8649 const cam_crop_region_t* scalerCropRegion)
8650{
8651 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8652 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8653 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8654 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8655
 8656    /* According to the spec, weight = 0 indicates the roi should be disabled.
 8657     * Without this check, the validation below (whether the roi lies inside the
 8658     * scaler crop region) would fail to reset the roi, causing the algorithm to
 8659     * keep using a stale roi window.
 8660     */
8661 if (roi->weight == 0) {
8662 return true;
8663 }
8664
8665 if ((roi_x_max < scalerCropRegion->left) ||
8666 // right edge of roi window is left of scalar crop's left edge
8667 (roi_y_max < scalerCropRegion->top) ||
8668 // bottom edge of roi window is above scalar crop's top edge
8669 (roi->rect.left > crop_x_max) ||
8670 // left edge of roi window is beyond(right) of scalar crop's right edge
8671 (roi->rect.top > crop_y_max)){
 8672            // top edge of roi window is beyond (below) scaler crop's bottom edge
8673 return false;
8674 }
8675 if (roi->rect.left < scalerCropRegion->left) {
8676 roi->rect.left = scalerCropRegion->left;
8677 }
8678 if (roi->rect.top < scalerCropRegion->top) {
8679 roi->rect.top = scalerCropRegion->top;
8680 }
8681 if (roi_x_max > crop_x_max) {
8682 roi_x_max = crop_x_max;
8683 }
8684 if (roi_y_max > crop_y_max) {
8685 roi_y_max = crop_y_max;
8686 }
8687 roi->rect.width = roi_x_max - roi->rect.left;
8688 roi->rect.height = roi_y_max - roi->rect.top;
8689 return true;
8690}
8691
8692/*===========================================================================
8693 * FUNCTION : convertLandmarks
8694 *
8695 * DESCRIPTION: helper method to extract the landmarks from face detection info
8696 *
8697 * PARAMETERS :
8698 * @landmark_data : input landmark data to be converted
8699 * @landmarks : int32_t destination array
8700 *
8701 *
8702 *==========================================================================*/
8703void QCamera3HardwareInterface::convertLandmarks(
8704 cam_face_landmarks_info_t landmark_data,
8705 int32_t *landmarks)
8706{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008707 if (landmark_data.is_left_eye_valid) {
8708 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8709 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8710 } else {
8711 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8712 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8713 }
8714
8715 if (landmark_data.is_right_eye_valid) {
8716 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8717 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8718 } else {
8719 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8720 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8721 }
8722
8723 if (landmark_data.is_mouth_valid) {
8724 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8725 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8726 } else {
8727 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8728 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8729 }
8730}
8731
8732/*===========================================================================
8733 * FUNCTION : setInvalidLandmarks
8734 *
8735 * DESCRIPTION: helper method to set invalid landmarks
8736 *
8737 * PARAMETERS :
8738 * @landmarks : int32_t destination array
8739 *
8740 *
8741 *==========================================================================*/
8742void QCamera3HardwareInterface::setInvalidLandmarks(
8743 int32_t *landmarks)
8744{
8745 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8746 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8747 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8748 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8749 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8750 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008751}
8752
8753#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008754
8755/*===========================================================================
8756 * FUNCTION : getCapabilities
8757 *
8758 * DESCRIPTION: query camera capability from back-end
8759 *
8760 * PARAMETERS :
8761 * @ops : mm-interface ops structure
8762 * @cam_handle : camera handle for which we need capability
8763 *
8764 * RETURN : ptr type of capability structure
8765 * capability for success
8766 * NULL for failure
8767 *==========================================================================*/
8768cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8769 uint32_t cam_handle)
8770{
8771 int rc = NO_ERROR;
8772 QCamera3HeapMemory *capabilityHeap = NULL;
8773 cam_capability_t *cap_ptr = NULL;
8774
8775 if (ops == NULL) {
8776 LOGE("Invalid arguments");
8777 return NULL;
8778 }
8779
8780 capabilityHeap = new QCamera3HeapMemory(1);
8781 if (capabilityHeap == NULL) {
8782 LOGE("creation of capabilityHeap failed");
8783 return NULL;
8784 }
8785
8786 /* Allocate memory for capability buffer */
8787 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8788 if(rc != OK) {
8789 LOGE("No memory for cappability");
8790 goto allocate_failed;
8791 }
8792
8793 /* Map memory for capability buffer */
8794 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8795
8796 rc = ops->map_buf(cam_handle,
8797 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8798 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8799 if(rc < 0) {
8800 LOGE("failed to map capability buffer");
8801 rc = FAILED_TRANSACTION;
8802 goto map_failed;
8803 }
8804
8805 /* Query Capability */
8806 rc = ops->query_capability(cam_handle);
8807 if(rc < 0) {
8808 LOGE("failed to query capability");
8809 rc = FAILED_TRANSACTION;
8810 goto query_failed;
8811 }
8812
8813 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8814 if (cap_ptr == NULL) {
8815 LOGE("out of memory");
8816 rc = NO_MEMORY;
8817 goto query_failed;
8818 }
8819
8820 memset(cap_ptr, 0, sizeof(cam_capability_t));
8821 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8822
8823 int index;
8824 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8825 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8826 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8827 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8828 }
8829
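    // Cleanup ladder for the goto error paths above: unmap and release the
    // capability heap in reverse order of setup; cap_ptr is returned only when
    // rc is still NO_ERROR.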
8830query_failed:
8831 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8832map_failed:
8833 capabilityHeap->deallocate();
8834allocate_failed:
8835 delete capabilityHeap;
8836
8837 if (rc != NO_ERROR) {
8838 return NULL;
8839 } else {
8840 return cap_ptr;
8841 }
8842}
8843
Thierry Strudel3d639192016-09-09 11:52:26 -07008844/*===========================================================================
8845 * FUNCTION : initCapabilities
8846 *
8847 * DESCRIPTION: initialize camera capabilities in static data struct
8848 *
8849 * PARAMETERS :
8850 * @cameraId : camera Id
8851 *
8852 * RETURN : int32_t type of status
8853 * NO_ERROR -- success
 8854 * non-zero failure code
8855 *==========================================================================*/
8856int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8857{
8858 int rc = 0;
8859 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008860 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008861
8862 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8863 if (rc) {
8864 LOGE("camera_open failed. rc = %d", rc);
8865 goto open_failed;
8866 }
8867 if (!cameraHandle) {
8868 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8869 goto open_failed;
8870 }
8871
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008872 handle = get_main_camera_handle(cameraHandle->camera_handle);
8873 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8874 if (gCamCapability[cameraId] == NULL) {
8875 rc = FAILED_TRANSACTION;
8876 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008877 }
8878
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008879 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008880 if (is_dual_camera_by_idx(cameraId)) {
8881 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8882 gCamCapability[cameraId]->aux_cam_cap =
8883 getCapabilities(cameraHandle->ops, handle);
8884 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8885 rc = FAILED_TRANSACTION;
8886 free(gCamCapability[cameraId]);
8887 goto failed_op;
8888 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008889
8890 // Copy the main camera capability to main_cam_cap struct
8891 gCamCapability[cameraId]->main_cam_cap =
8892 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8893 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8894 LOGE("out of memory");
8895 rc = NO_MEMORY;
8896 goto failed_op;
8897 }
8898 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8899 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008900 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008901failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008902 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8903 cameraHandle = NULL;
8904open_failed:
8905 return rc;
8906}
8907
8908/*==========================================================================
 8909 * FUNCTION : get3AVersion
8910 *
8911 * DESCRIPTION: get the Q3A S/W version
8912 *
8913 * PARAMETERS :
8914 * @sw_version: Reference of Q3A structure which will hold version info upon
8915 * return
8916 *
8917 * RETURN : None
8918 *
8919 *==========================================================================*/
8920void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8921{
8922 if(gCamCapability[mCameraId])
8923 sw_version = gCamCapability[mCameraId]->q3a_version;
8924 else
8925 LOGE("Capability structure NULL!");
8926}
8927
8928
8929/*===========================================================================
8930 * FUNCTION : initParameters
8931 *
8932 * DESCRIPTION: initialize camera parameters
8933 *
8934 * PARAMETERS :
8935 *
8936 * RETURN : int32_t type of status
8937 * NO_ERROR -- success
 8938 * non-zero failure code
8939 *==========================================================================*/
8940int QCamera3HardwareInterface::initParameters()
8941{
8942 int rc = 0;
8943
8944 //Allocate Set Param Buffer
8945 mParamHeap = new QCamera3HeapMemory(1);
8946 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8947 if(rc != OK) {
8948 rc = NO_MEMORY;
8949 LOGE("Failed to allocate SETPARM Heap memory");
8950 delete mParamHeap;
8951 mParamHeap = NULL;
8952 return rc;
8953 }
8954
8955 //Map memory for parameters buffer
8956 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8957 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8958 mParamHeap->getFd(0),
8959 sizeof(metadata_buffer_t),
8960 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8961 if(rc < 0) {
8962 LOGE("failed to map SETPARM buffer");
8963 rc = FAILED_TRANSACTION;
8964 mParamHeap->deallocate();
8965 delete mParamHeap;
8966 mParamHeap = NULL;
8967 return rc;
8968 }
8969
8970 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8971
8972 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8973 return rc;
8974}
8975
8976/*===========================================================================
8977 * FUNCTION : deinitParameters
8978 *
8979 * DESCRIPTION: de-initialize camera parameters
8980 *
8981 * PARAMETERS :
8982 *
8983 * RETURN : NONE
8984 *==========================================================================*/
8985void QCamera3HardwareInterface::deinitParameters()
8986{
8987 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8988 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8989
8990 mParamHeap->deallocate();
8991 delete mParamHeap;
8992 mParamHeap = NULL;
8993
8994 mParameters = NULL;
8995
8996 free(mPrevParameters);
8997 mPrevParameters = NULL;
8998}
8999
9000/*===========================================================================
9001 * FUNCTION : calcMaxJpegSize
9002 *
9003 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9004 *
9005 * PARAMETERS :
9006 *
9007 * RETURN : max_jpeg_size
9008 *==========================================================================*/
9009size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9010{
9011 size_t max_jpeg_size = 0;
9012 size_t temp_width, temp_height;
9013 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9014 MAX_SIZES_CNT);
9015 for (size_t i = 0; i < count; i++) {
9016 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9017 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9018 if (temp_width * temp_height > max_jpeg_size ) {
9019 max_jpeg_size = temp_width * temp_height;
9020 }
9021 }
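    // Presumably a worst-case JPEG buffer estimate: the largest picture size at
    // 1.5 bytes per pixel (YUV 4:2:0) plus the camera3_jpeg_blob_t trailer.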
9022 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9023 return max_jpeg_size;
9024}
9025
9026/*===========================================================================
9027 * FUNCTION : getMaxRawSize
9028 *
9029 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9030 *
9031 * PARAMETERS :
9032 *
9033 * RETURN : Largest supported Raw Dimension
9034 *==========================================================================*/
9035cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9036{
9037 int max_width = 0;
9038 cam_dimension_t maxRawSize;
9039
9040 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9041 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9042 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9043 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9044 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9045 }
9046 }
9047 return maxRawSize;
9048}
9049
9050
9051/*===========================================================================
9052 * FUNCTION : calcMaxJpegDim
9053 *
9054 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9055 *
9056 * PARAMETERS :
9057 *
9058 * RETURN : max_jpeg_dim
9059 *==========================================================================*/
9060cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9061{
9062 cam_dimension_t max_jpeg_dim;
9063 cam_dimension_t curr_jpeg_dim;
9064 max_jpeg_dim.width = 0;
9065 max_jpeg_dim.height = 0;
9066 curr_jpeg_dim.width = 0;
9067 curr_jpeg_dim.height = 0;
9068 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9069 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9070 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9071 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9072 max_jpeg_dim.width * max_jpeg_dim.height ) {
9073 max_jpeg_dim.width = curr_jpeg_dim.width;
9074 max_jpeg_dim.height = curr_jpeg_dim.height;
9075 }
9076 }
9077 return max_jpeg_dim;
9078}
9079
9080/*===========================================================================
9081 * FUNCTION : addStreamConfig
9082 *
9083 * DESCRIPTION: adds the stream configuration to the array
9084 *
9085 * PARAMETERS :
9086 * @available_stream_configs : pointer to stream configuration array
9087 * @scalar_format : scalar format
9088 * @dim : configuration dimension
9089 * @config_type : input or output configuration type
9090 *
9091 * RETURN : NONE
9092 *==========================================================================*/
9093void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9094 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9095{
9096 available_stream_configs.add(scalar_format);
9097 available_stream_configs.add(dim.width);
9098 available_stream_configs.add(dim.height);
9099 available_stream_configs.add(config_type);
9100}
9101
9102/*===========================================================================
9103 * FUNCTION   : supportBurstCapture
9104 *
9105 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9106 *
9107 * PARAMETERS :
9108 * @cameraId : camera Id
9109 *
9110 * RETURN : true if camera supports BURST_CAPTURE
9111 * false otherwise
9112 *==========================================================================*/
9113bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9114{
9115 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9116 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9117 const int32_t highResWidth = 3264;
9118 const int32_t highResHeight = 2448;
9119
9120 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9121 // Maximum resolution images cannot be captured at >= 10fps
9122 // -> not supporting BURST_CAPTURE
9123 return false;
9124 }
9125
9126 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9127 // Maximum resolution images can be captured at >= 20fps
9128 // --> supporting BURST_CAPTURE
9129 return true;
9130 }
9131
9132    // Find the smallest highRes resolution, or the largest resolution if there is none
9133 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9134 MAX_SIZES_CNT);
9135 size_t highRes = 0;
9136 while ((highRes + 1 < totalCnt) &&
9137 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9138 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9139 highResWidth * highResHeight)) {
9140 highRes++;
9141 }
9142 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9143 return true;
9144 } else {
9145 return false;
9146 }
9147}
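// Sketch of the decision above for a hypothetical sensor whose largest picture
// size has picture_min_duration[0] == 66666666 ns (~15 fps):
//  - the 20 fps (50 ms) fast path fails and the 10 fps (100 ms) bound passes,
//    so the "highRes" search runs next;
//  - BURST_CAPTURE is then reported only if the smallest size at or above
//    3264x2448 can sustain 20 fps (min duration <= 50 ms).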
9148
9149/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009150 * FUNCTION : getPDStatIndex
9151 *
9152 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9153 *
9154 * PARAMETERS :
9155 * @caps : camera capabilities
9156 *
9157 * RETURN : int32_t type
9158 * non-negative - on success
9159 * -1 - on failure
9160 *==========================================================================*/
9161int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9162 if (nullptr == caps) {
9163 return -1;
9164 }
9165
9166 uint32_t metaRawCount = caps->meta_raw_channel_count;
9167 int32_t ret = -1;
9168 for (size_t i = 0; i < metaRawCount; i++) {
9169 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9170 ret = i;
9171 break;
9172 }
9173 }
9174
9175 return ret;
9176}
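// getPDStatIndex() is used below in initStaticMetadata() to decide whether the
// PDAF statistics buffer should be advertised as part of the depth capabilities.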
9177
9178/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009179 * FUNCTION : initStaticMetadata
9180 *
9181 * DESCRIPTION: initialize the static metadata
9182 *
9183 * PARAMETERS :
9184 * @cameraId : camera Id
9185 *
9186 * RETURN : int32_t type of status
9187 * 0 -- success
9188 * non-zero failure code
9189 *==========================================================================*/
9190int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9191{
9192 int rc = 0;
9193 CameraMetadata staticInfo;
9194 size_t count = 0;
9195 bool limitedDevice = false;
9196 char prop[PROPERTY_VALUE_MAX];
9197 bool supportBurst = false;
9198
9199 supportBurst = supportBurstCapture(cameraId);
9200
9201    /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9202     * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
9203     * advertised as a limited device */
9204 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9205 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9206 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9207 !supportBurst;
9208
9209 uint8_t supportedHwLvl = limitedDevice ?
9210 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009211#ifndef USE_HAL_3_3
9212 // LEVEL_3 - This device will support level 3.
9213 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9214#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009215 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009216#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009217
9218 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9219 &supportedHwLvl, 1);
9220
9221 bool facingBack = false;
9222 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9223 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9224 facingBack = true;
9225 }
9226 /*HAL 3 only*/
9227 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9228 &gCamCapability[cameraId]->min_focus_distance, 1);
9229
9230 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9231 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9232
9233 /*should be using focal lengths but sensor doesn't provide that info now*/
9234 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9235 &gCamCapability[cameraId]->focal_length,
9236 1);
9237
9238 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9239 gCamCapability[cameraId]->apertures,
9240 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9241
9242 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9243 gCamCapability[cameraId]->filter_densities,
9244 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9245
9246
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009247 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9248 size_t mode_count =
9249 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9250 for (size_t i = 0; i < mode_count; i++) {
9251 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9252 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009253 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009254 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009255
9256 int32_t lens_shading_map_size[] = {
9257 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9258 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9259 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9260 lens_shading_map_size,
9261 sizeof(lens_shading_map_size)/sizeof(int32_t));
9262
9263 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9264 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9265
9266 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9267 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9268
9269 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9270 &gCamCapability[cameraId]->max_frame_duration, 1);
9271
9272 camera_metadata_rational baseGainFactor = {
9273 gCamCapability[cameraId]->base_gain_factor.numerator,
9274 gCamCapability[cameraId]->base_gain_factor.denominator};
9275 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9276 &baseGainFactor, 1);
9277
9278 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9279 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9280
9281 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9282 gCamCapability[cameraId]->pixel_array_size.height};
9283 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9284 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9285
9286 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9287 gCamCapability[cameraId]->active_array_size.top,
9288 gCamCapability[cameraId]->active_array_size.width,
9289 gCamCapability[cameraId]->active_array_size.height};
9290 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9291 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9292
9293 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9294 &gCamCapability[cameraId]->white_level, 1);
9295
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009296 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9297 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9298 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009299 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009300 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009301
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009302#ifndef USE_HAL_3_3
9303 bool hasBlackRegions = false;
9304 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9305 LOGW("black_region_count: %d is bounded to %d",
9306 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9307 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9308 }
9309 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9310 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9311 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9312 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9313 }
9314 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9315 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9316 hasBlackRegions = true;
9317 }
9318#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009319 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9320 &gCamCapability[cameraId]->flash_charge_duration, 1);
9321
9322 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9323 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9324
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009325 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9326 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9327 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009328 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9329 &timestampSource, 1);
9330
Thierry Strudel54dc9782017-02-15 12:12:10 -08009331 //update histogram vendor data
9332 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009333 &gCamCapability[cameraId]->histogram_size, 1);
9334
Thierry Strudel54dc9782017-02-15 12:12:10 -08009335 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009336 &gCamCapability[cameraId]->max_histogram_count, 1);
9337
Shuzhen Wang14415f52016-11-16 18:26:18 -08009338 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9339    //so that the app can request a smaller number of bins than the maximum supported.
9340 std::vector<int32_t> histBins;
9341 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9342 histBins.push_back(maxHistBins);
9343 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9344 (maxHistBins & 0x1) == 0) {
9345 histBins.push_back(maxHistBins >> 1);
9346 maxHistBins >>= 1;
9347 }
9348 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9349 histBins.data(), histBins.size());
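    // Example with hypothetical values: if max_histogram_count were 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE were 64, the advertised bin counts would be
    // {256, 128, 64}.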
9350
Thierry Strudel3d639192016-09-09 11:52:26 -07009351 int32_t sharpness_map_size[] = {
9352 gCamCapability[cameraId]->sharpness_map_size.width,
9353 gCamCapability[cameraId]->sharpness_map_size.height};
9354
9355 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9356 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9357
9358 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9359 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9360
Emilian Peev0f3c3162017-03-15 12:57:46 +00009361 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9362 if (0 <= indexPD) {
9363 // Advertise PD stats data as part of the Depth capabilities
9364 int32_t depthWidth =
9365 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9366 int32_t depthHeight =
9367 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009368 int32_t depthStride =
9369 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009370 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9371 assert(0 < depthSamplesCount);
9372 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9373 &depthSamplesCount, 1);
9374
9375 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9376 depthHeight,
9377 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9378 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9379 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9380 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9381 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9382
9383 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9384 depthHeight, 33333333,
9385 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9386 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9387 depthMinDuration,
9388 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9389
9390 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9391 depthHeight, 0,
9392 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9393 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9394 depthStallDuration,
9395 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9396
9397 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9398 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009399
9400 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9401 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9402 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009403 }
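    // Illustrative numbers (hypothetical PD stats dimensions): for a 1008x752
    // raw_meta_dim entry, depthSamplesCount = (1008 * 752 * 2) / 16 = 94752, which
    // is advertised via ANDROID_DEPTH_MAX_DEPTH_SAMPLES and as a
    // depthSamplesCount x 1 BLOB configuration alongside the RAW16 configuration above.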
9404
Thierry Strudel3d639192016-09-09 11:52:26 -07009405 int32_t scalar_formats[] = {
9406 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9407 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9408 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9409 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9410 HAL_PIXEL_FORMAT_RAW10,
9411 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009412 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9413 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9414 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009415
9416 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9417 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9418 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9419 count, MAX_SIZES_CNT, available_processed_sizes);
9420 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9421 available_processed_sizes, count * 2);
9422
9423 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9424 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9425 makeTable(gCamCapability[cameraId]->raw_dim,
9426 count, MAX_SIZES_CNT, available_raw_sizes);
9427 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9428 available_raw_sizes, count * 2);
9429
9430 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9431 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9432 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9433 count, MAX_SIZES_CNT, available_fps_ranges);
9434 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9435 available_fps_ranges, count * 2);
9436
9437 camera_metadata_rational exposureCompensationStep = {
9438 gCamCapability[cameraId]->exp_compensation_step.numerator,
9439 gCamCapability[cameraId]->exp_compensation_step.denominator};
9440 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9441 &exposureCompensationStep, 1);
9442
9443 Vector<uint8_t> availableVstabModes;
9444 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9445 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009446 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009447 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009448 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009449 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009450 count = IS_TYPE_MAX;
9451 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9452 for (size_t i = 0; i < count; i++) {
9453 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9454 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9455 eisSupported = true;
9456 break;
9457 }
9458 }
9459 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009460 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9461 }
9462 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9463 availableVstabModes.array(), availableVstabModes.size());
9464
9465 /*HAL 1 and HAL 3 common*/
9466 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9467 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9468 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009469 // Cap the max zoom to the max preferred value
9470 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009471 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9472 &maxZoom, 1);
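    // Illustrative numbers (hypothetical zoom table): if the last zoom_ratio_tbl
    // entry were 600, maxZoom would be MIN(600 / 100, MAX_PREFERRED_ZOOM_RATIO),
    // i.e. 6x unless MAX_PREFERRED_ZOOM_RATIO caps it lower.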
9473
9474 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9475 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9476
9477 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9478 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9479 max3aRegions[2] = 0; /* AF not supported */
9480 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9481 max3aRegions, 3);
9482
9483 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9484 memset(prop, 0, sizeof(prop));
9485 property_get("persist.camera.facedetect", prop, "1");
9486 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9487 LOGD("Support face detection mode: %d",
9488 supportedFaceDetectMode);
9489
9490 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009491    /* supported mode should be OFF if the max number of faces is 0 */
9492 if (maxFaces <= 0) {
9493 supportedFaceDetectMode = 0;
9494 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009495 Vector<uint8_t> availableFaceDetectModes;
9496 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9497 if (supportedFaceDetectMode == 1) {
9498 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9499 } else if (supportedFaceDetectMode == 2) {
9500 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9501 } else if (supportedFaceDetectMode == 3) {
9502 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9503 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9504 } else {
9505 maxFaces = 0;
9506 }
9507 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9508 availableFaceDetectModes.array(),
9509 availableFaceDetectModes.size());
9510 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9511 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009512 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9513 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9514 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009515
9516 int32_t exposureCompensationRange[] = {
9517 gCamCapability[cameraId]->exposure_compensation_min,
9518 gCamCapability[cameraId]->exposure_compensation_max};
9519 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9520 exposureCompensationRange,
9521 sizeof(exposureCompensationRange)/sizeof(int32_t));
9522
9523 uint8_t lensFacing = (facingBack) ?
9524 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9525 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9526
9527 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9528 available_thumbnail_sizes,
9529 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9530
9531 /*all sizes will be clubbed into this tag*/
9532 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9533 /*android.scaler.availableStreamConfigurations*/
9534 Vector<int32_t> available_stream_configs;
9535 cam_dimension_t active_array_dim;
9536 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9537 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009538
9539    /*advertise the list of input dimensions supported based on the property below.
9540      By default all sizes up to 5MP will be advertised.
9541 Note that the setprop resolution format should be WxH.
9542 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9543 To list all supported sizes, setprop needs to be set with "0x0" */
9544 cam_dimension_t minInputSize = {2592,1944}; //5MP
9545 memset(prop, 0, sizeof(prop));
9546 property_get("persist.camera.input.minsize", prop, "2592x1944");
9547 if (strlen(prop) > 0) {
9548 char *saveptr = NULL;
9549 char *token = strtok_r(prop, "x", &saveptr);
9550 if (token != NULL) {
9551 minInputSize.width = atoi(token);
9552 }
9553 token = strtok_r(NULL, "x", &saveptr);
9554 if (token != NULL) {
9555 minInputSize.height = atoi(token);
9556 }
9557 }
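    // With the (i == 0) check in the loop below, only the largest picture size is
    // considered for an INPUT stream configuration, and only when it is at least
    // minInputSize in one dimension; e.g. the default "2592x1944" skips the input
    // configuration when the full-size output is smaller than 2592x1944 in both
    // dimensions.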
9558
Thierry Strudel3d639192016-09-09 11:52:26 -07009559    /* Add input/output stream configurations for each scalar format */
9560 for (size_t j = 0; j < scalar_formats_count; j++) {
9561 switch (scalar_formats[j]) {
9562 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9563 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9564 case HAL_PIXEL_FORMAT_RAW10:
9565 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9566 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9567 addStreamConfig(available_stream_configs, scalar_formats[j],
9568 gCamCapability[cameraId]->raw_dim[i],
9569 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9570 }
9571 break;
9572 case HAL_PIXEL_FORMAT_BLOB:
9573 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9574 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9575 addStreamConfig(available_stream_configs, scalar_formats[j],
9576 gCamCapability[cameraId]->picture_sizes_tbl[i],
9577 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9578 }
9579 break;
9580 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9581 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9582 default:
9583 cam_dimension_t largest_picture_size;
9584 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9585 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9586 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9587 addStreamConfig(available_stream_configs, scalar_formats[j],
9588 gCamCapability[cameraId]->picture_sizes_tbl[i],
9589 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009590                /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009591 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9592 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009593 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9594 >= minInputSize.width) || (gCamCapability[cameraId]->
9595 picture_sizes_tbl[i].height >= minInputSize.height)) {
9596 addStreamConfig(available_stream_configs, scalar_formats[j],
9597 gCamCapability[cameraId]->picture_sizes_tbl[i],
9598 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9599 }
9600 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009601 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009602
Thierry Strudel3d639192016-09-09 11:52:26 -07009603 break;
9604 }
9605 }
9606
9607 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9608 available_stream_configs.array(), available_stream_configs.size());
9609 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9610 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9611
9612 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9613 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9614
9615 /* android.scaler.availableMinFrameDurations */
9616 Vector<int64_t> available_min_durations;
9617 for (size_t j = 0; j < scalar_formats_count; j++) {
9618 switch (scalar_formats[j]) {
9619 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9620 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9621 case HAL_PIXEL_FORMAT_RAW10:
9622 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9623 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9624 available_min_durations.add(scalar_formats[j]);
9625 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9626 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9627 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9628 }
9629 break;
9630 default:
9631 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9632 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9633 available_min_durations.add(scalar_formats[j]);
9634 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9635 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9636 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9637 }
9638 break;
9639 }
9640 }
9641 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9642 available_min_durations.array(), available_min_durations.size());
9643
9644 Vector<int32_t> available_hfr_configs;
9645 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9646 int32_t fps = 0;
9647 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9648 case CAM_HFR_MODE_60FPS:
9649 fps = 60;
9650 break;
9651 case CAM_HFR_MODE_90FPS:
9652 fps = 90;
9653 break;
9654 case CAM_HFR_MODE_120FPS:
9655 fps = 120;
9656 break;
9657 case CAM_HFR_MODE_150FPS:
9658 fps = 150;
9659 break;
9660 case CAM_HFR_MODE_180FPS:
9661 fps = 180;
9662 break;
9663 case CAM_HFR_MODE_210FPS:
9664 fps = 210;
9665 break;
9666 case CAM_HFR_MODE_240FPS:
9667 fps = 240;
9668 break;
9669 case CAM_HFR_MODE_480FPS:
9670 fps = 480;
9671 break;
9672 case CAM_HFR_MODE_OFF:
9673 case CAM_HFR_MODE_MAX:
9674 default:
9675 break;
9676 }
9677
9678 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9679 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9680            /* For each HFR frame rate, we need to advertise one variable fps range
9681             * and one fixed fps range per dimension. E.g.: for 120 FPS, advertise [30, 120]
9682             * and [120, 120]. While camcorder preview alone is running, [30, 120] is
9683             * set by the app. When video recording is started, [120, 120] is
9684             * set. This way the sensor configuration does not change when recording
9685             * starts (see the worked example after this loop). */
9686
9687 /* (width, height, fps_min, fps_max, batch_size_max) */
9688 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9689 j < MAX_SIZES_CNT; j++) {
9690 available_hfr_configs.add(
9691 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9692 available_hfr_configs.add(
9693 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9694 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9695 available_hfr_configs.add(fps);
9696 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9697
9698 /* (width, height, fps_min, fps_max, batch_size_max) */
9699 available_hfr_configs.add(
9700 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9701 available_hfr_configs.add(
9702 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9703 available_hfr_configs.add(fps);
9704 available_hfr_configs.add(fps);
9705 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9706 }
9707 }
9708 }
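    // Worked example of the entries built above (assuming PREVIEW_FPS_FOR_HFR is 30,
    // which is an assumption for illustration): a 1920x1080 entry in the 120 fps HFR
    // table produces {1920, 1080, 30, 120, 4} and {1920, 1080, 120, 120, 4}.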
9709 //Advertise HFR capability only if the property is set
9710 memset(prop, 0, sizeof(prop));
9711 property_get("persist.camera.hal3hfr.enable", prop, "1");
9712 uint8_t hfrEnable = (uint8_t)atoi(prop);
9713
9714 if(hfrEnable && available_hfr_configs.array()) {
9715 staticInfo.update(
9716 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9717 available_hfr_configs.array(), available_hfr_configs.size());
9718 }
9719
9720 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9721 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9722 &max_jpeg_size, 1);
9723
9724 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9725 size_t size = 0;
9726 count = CAM_EFFECT_MODE_MAX;
9727 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9728 for (size_t i = 0; i < count; i++) {
9729 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9730 gCamCapability[cameraId]->supported_effects[i]);
9731 if (NAME_NOT_FOUND != val) {
9732 avail_effects[size] = (uint8_t)val;
9733 size++;
9734 }
9735 }
9736 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9737 avail_effects,
9738 size);
9739
9740 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9741 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9742 size_t supported_scene_modes_cnt = 0;
9743 count = CAM_SCENE_MODE_MAX;
9744 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9745 for (size_t i = 0; i < count; i++) {
9746 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9747 CAM_SCENE_MODE_OFF) {
9748 int val = lookupFwkName(SCENE_MODES_MAP,
9749 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9750 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009751
Thierry Strudel3d639192016-09-09 11:52:26 -07009752 if (NAME_NOT_FOUND != val) {
9753 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9754 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9755 supported_scene_modes_cnt++;
9756 }
9757 }
9758 }
9759 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9760 avail_scene_modes,
9761 supported_scene_modes_cnt);
9762
9763 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9764 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9765 supported_scene_modes_cnt,
9766 CAM_SCENE_MODE_MAX,
9767 scene_mode_overrides,
9768 supported_indexes,
9769 cameraId);
9770
9771 if (supported_scene_modes_cnt == 0) {
9772 supported_scene_modes_cnt = 1;
9773 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9774 }
9775
9776 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9777 scene_mode_overrides, supported_scene_modes_cnt * 3);
9778
9779 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9780 ANDROID_CONTROL_MODE_AUTO,
9781 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9782 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9783 available_control_modes,
9784 3);
9785
9786 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9787 size = 0;
9788 count = CAM_ANTIBANDING_MODE_MAX;
9789 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9790 for (size_t i = 0; i < count; i++) {
9791 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9792 gCamCapability[cameraId]->supported_antibandings[i]);
9793 if (NAME_NOT_FOUND != val) {
9794 avail_antibanding_modes[size] = (uint8_t)val;
9795 size++;
9796 }
9797
9798 }
9799 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9800 avail_antibanding_modes,
9801 size);
9802
9803 uint8_t avail_abberation_modes[] = {
9804 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9805 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9806 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9807 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9808 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9809 if (0 == count) {
9810        // If no aberration correction modes are available for a device, advertise only the OFF mode
9811 size = 1;
9812 } else {
9813        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported
9814        // So, advertise all 3 modes if at least one mode is supported, as per the
9815        // new M requirement
9816 size = 3;
9817 }
9818 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9819 avail_abberation_modes,
9820 size);
9821
9822 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9823 size = 0;
9824 count = CAM_FOCUS_MODE_MAX;
9825 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9826 for (size_t i = 0; i < count; i++) {
9827 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9828 gCamCapability[cameraId]->supported_focus_modes[i]);
9829 if (NAME_NOT_FOUND != val) {
9830 avail_af_modes[size] = (uint8_t)val;
9831 size++;
9832 }
9833 }
9834 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9835 avail_af_modes,
9836 size);
9837
9838 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9839 size = 0;
9840 count = CAM_WB_MODE_MAX;
9841 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9842 for (size_t i = 0; i < count; i++) {
9843 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9844 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9845 gCamCapability[cameraId]->supported_white_balances[i]);
9846 if (NAME_NOT_FOUND != val) {
9847 avail_awb_modes[size] = (uint8_t)val;
9848 size++;
9849 }
9850 }
9851 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9852 avail_awb_modes,
9853 size);
9854
9855 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9856 count = CAM_FLASH_FIRING_LEVEL_MAX;
9857 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9858 count);
9859 for (size_t i = 0; i < count; i++) {
9860 available_flash_levels[i] =
9861 gCamCapability[cameraId]->supported_firing_levels[i];
9862 }
9863 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9864 available_flash_levels, count);
9865
9866 uint8_t flashAvailable;
9867 if (gCamCapability[cameraId]->flash_available)
9868 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9869 else
9870 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9871 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9872 &flashAvailable, 1);
9873
9874 Vector<uint8_t> avail_ae_modes;
9875 count = CAM_AE_MODE_MAX;
9876 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9877 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009878 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9879 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9880 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9881 }
9882 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009883 }
9884 if (flashAvailable) {
9885 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9886 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9887 }
9888 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9889 avail_ae_modes.array(),
9890 avail_ae_modes.size());
9891
9892 int32_t sensitivity_range[2];
9893 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9894 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9895 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9896 sensitivity_range,
9897 sizeof(sensitivity_range) / sizeof(int32_t));
9898
9899 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9900 &gCamCapability[cameraId]->max_analog_sensitivity,
9901 1);
9902
9903 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9904 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9905 &sensor_orientation,
9906 1);
9907
9908 int32_t max_output_streams[] = {
9909 MAX_STALLING_STREAMS,
9910 MAX_PROCESSED_STREAMS,
9911 MAX_RAW_STREAMS};
9912 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9913 max_output_streams,
9914 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9915
9916 uint8_t avail_leds = 0;
9917 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9918 &avail_leds, 0);
9919
9920 uint8_t focus_dist_calibrated;
9921 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9922 gCamCapability[cameraId]->focus_dist_calibrated);
9923 if (NAME_NOT_FOUND != val) {
9924 focus_dist_calibrated = (uint8_t)val;
9925 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9926 &focus_dist_calibrated, 1);
9927 }
9928
9929 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9930 size = 0;
9931 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9932 MAX_TEST_PATTERN_CNT);
9933 for (size_t i = 0; i < count; i++) {
9934 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9935 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9936 if (NAME_NOT_FOUND != testpatternMode) {
9937 avail_testpattern_modes[size] = testpatternMode;
9938 size++;
9939 }
9940 }
9941 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9942 avail_testpattern_modes,
9943 size);
9944
9945 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9946 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9947 &max_pipeline_depth,
9948 1);
9949
9950 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9951 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9952 &partial_result_count,
9953 1);
9954
9955 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9956 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9957
9958 Vector<uint8_t> available_capabilities;
9959 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9960 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9961 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9962 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9963 if (supportBurst) {
9964 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9965 }
9966 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9967 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9968 if (hfrEnable && available_hfr_configs.array()) {
9969 available_capabilities.add(
9970 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9971 }
9972
9973 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9974 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9975 }
9976 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9977 available_capabilities.array(),
9978 available_capabilities.size());
9979
9980    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9981    //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9982 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9983 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9984
9985 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9986 &aeLockAvailable, 1);
9987
9988    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9989    //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9990 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9991 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9992
9993 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9994 &awbLockAvailable, 1);
9995
9996 int32_t max_input_streams = 1;
9997 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9998 &max_input_streams,
9999 1);
10000
10001    /* The format of the map is: inputFormat, numOutputFormats, outputFormat1, ..., outputFormatN */
10002 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10003 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10004 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10005 HAL_PIXEL_FORMAT_YCbCr_420_888};
10006 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10007 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
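    // Read as two entries: IMPLEMENTATION_DEFINED input -> {BLOB, YCbCr_420_888}
    // outputs, and YCbCr_420_888 input -> {BLOB, YCbCr_420_888} outputs, matching
    // the PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities advertised above.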
10008
10009 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10010 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10011 &max_latency,
10012 1);
10013
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010014#ifndef USE_HAL_3_3
10015 int32_t isp_sensitivity_range[2];
10016 isp_sensitivity_range[0] =
10017 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10018 isp_sensitivity_range[1] =
10019 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10020 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10021 isp_sensitivity_range,
10022 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10023#endif
10024
Thierry Strudel3d639192016-09-09 11:52:26 -070010025 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10026 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10027 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10028 available_hot_pixel_modes,
10029 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10030
10031 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10032 ANDROID_SHADING_MODE_FAST,
10033 ANDROID_SHADING_MODE_HIGH_QUALITY};
10034 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10035 available_shading_modes,
10036 3);
10037
10038 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10039 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10040 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10041 available_lens_shading_map_modes,
10042 2);
10043
10044 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10045 ANDROID_EDGE_MODE_FAST,
10046 ANDROID_EDGE_MODE_HIGH_QUALITY,
10047 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10048 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10049 available_edge_modes,
10050 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10051
10052 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10053 ANDROID_NOISE_REDUCTION_MODE_FAST,
10054 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10055 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10056 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10057 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10058 available_noise_red_modes,
10059 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10060
10061 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10062 ANDROID_TONEMAP_MODE_FAST,
10063 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10064 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10065 available_tonemap_modes,
10066 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10067
10068 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10069 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10070 available_hot_pixel_map_modes,
10071 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10072
10073 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10074 gCamCapability[cameraId]->reference_illuminant1);
10075 if (NAME_NOT_FOUND != val) {
10076 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10077 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10078 }
10079
10080 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10081 gCamCapability[cameraId]->reference_illuminant2);
10082 if (NAME_NOT_FOUND != val) {
10083 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10084 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10085 }
10086
10087 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10088 (void *)gCamCapability[cameraId]->forward_matrix1,
10089 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10090
10091 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10092 (void *)gCamCapability[cameraId]->forward_matrix2,
10093 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10094
10095 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10096 (void *)gCamCapability[cameraId]->color_transform1,
10097 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10098
10099 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10100 (void *)gCamCapability[cameraId]->color_transform2,
10101 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10102
10103 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10104 (void *)gCamCapability[cameraId]->calibration_transform1,
10105 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10106
10107 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10108 (void *)gCamCapability[cameraId]->calibration_transform2,
10109 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10110
10111 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10112 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10113 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10114 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10115 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10116 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10117 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10118 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10119 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10120 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10121 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10122 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10123 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10124 ANDROID_JPEG_GPS_COORDINATES,
10125 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10126 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10127 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10128 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10129 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10130 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10131 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10132 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10133 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10134 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010135#ifndef USE_HAL_3_3
10136 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10137#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010138 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010139 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010140 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10141 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010142 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010143 /* DevCamDebug metadata request_keys_basic */
10144 DEVCAMDEBUG_META_ENABLE,
10145 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010146 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010147 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010148 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010149 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010150 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010151 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010152
10153 size_t request_keys_cnt =
10154 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10155 Vector<int32_t> available_request_keys;
10156 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10157 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10158 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10159 }
10160
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010161 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010162 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10163 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10164 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010165 }
10166
Thierry Strudel3d639192016-09-09 11:52:26 -070010167 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10168 available_request_keys.array(), available_request_keys.size());
10169
10170 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10171 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10172 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10173 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10174 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10175 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10176 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10177 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10178 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10179 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10180 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10181 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10182 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10183 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10184 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10185 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10186 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010187 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010188 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10189 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10190 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010191 ANDROID_STATISTICS_FACE_SCORES,
10192#ifndef USE_HAL_3_3
10193 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10194#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010195 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010196 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010197 // DevCamDebug metadata result_keys_basic
10198 DEVCAMDEBUG_META_ENABLE,
10199 // DevCamDebug metadata result_keys AF
10200 DEVCAMDEBUG_AF_LENS_POSITION,
10201 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10202 DEVCAMDEBUG_AF_TOF_DISTANCE,
10203 DEVCAMDEBUG_AF_LUMA,
10204 DEVCAMDEBUG_AF_HAF_STATE,
10205 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10206 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10207 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10208 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10209 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10210 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10211 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10212 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10213 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10214 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10215 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10216 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10217 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10218 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10219 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10220 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10221 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10222 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10223 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10224 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10225 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10226 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10227 // DevCamDebug metadata result_keys AEC
10228 DEVCAMDEBUG_AEC_TARGET_LUMA,
10229 DEVCAMDEBUG_AEC_COMP_LUMA,
10230 DEVCAMDEBUG_AEC_AVG_LUMA,
10231 DEVCAMDEBUG_AEC_CUR_LUMA,
10232 DEVCAMDEBUG_AEC_LINECOUNT,
10233 DEVCAMDEBUG_AEC_REAL_GAIN,
10234 DEVCAMDEBUG_AEC_EXP_INDEX,
10235 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010236 // DevCamDebug metadata result_keys zzHDR
10237 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10238 DEVCAMDEBUG_AEC_L_LINECOUNT,
10239 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10240 DEVCAMDEBUG_AEC_S_LINECOUNT,
10241 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10242 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10243 // DevCamDebug metadata result_keys ADRC
10244 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10245 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10246 DEVCAMDEBUG_AEC_GTM_RATIO,
10247 DEVCAMDEBUG_AEC_LTM_RATIO,
10248 DEVCAMDEBUG_AEC_LA_RATIO,
10249 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010250 // DevCamDebug metadata result_keys AEC MOTION
10251 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10252 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10253 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010254 // DevCamDebug metadata result_keys AWB
10255 DEVCAMDEBUG_AWB_R_GAIN,
10256 DEVCAMDEBUG_AWB_G_GAIN,
10257 DEVCAMDEBUG_AWB_B_GAIN,
10258 DEVCAMDEBUG_AWB_CCT,
10259 DEVCAMDEBUG_AWB_DECISION,
10260 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010261 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10262 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10263 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010264 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010265 };
10266
Thierry Strudel3d639192016-09-09 11:52:26 -070010267 size_t result_keys_cnt =
10268 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10269
10270 Vector<int32_t> available_result_keys;
10271 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10272 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10273 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10274 }
10275 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10276 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10277 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10278 }
10279 if (supportedFaceDetectMode == 1) {
10280 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10281 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10282 } else if ((supportedFaceDetectMode == 2) ||
10283 (supportedFaceDetectMode == 3)) {
10284 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10285 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10286 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010287#ifndef USE_HAL_3_3
10288 if (hasBlackRegions) {
10289 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10290 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10291 }
10292#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010293
10294 if (gExposeEnableZslKey) {
10295 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10296 }
10297
Thierry Strudel3d639192016-09-09 11:52:26 -070010298 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10299 available_result_keys.array(), available_result_keys.size());
10300
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010301 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010302 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10303 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10304 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10305 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10306 ANDROID_SCALER_CROPPING_TYPE,
10307 ANDROID_SYNC_MAX_LATENCY,
10308 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10309 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10310 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10311 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10312 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10313 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10314 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10315 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10316 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10317 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10318 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10319 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10320 ANDROID_LENS_FACING,
10321 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10322 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10323 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10324 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10325 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10326 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10327 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10328 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10329 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10330 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10331 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10332 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10333 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10334 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10335 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10336 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10337 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10338 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10339 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10340 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010341 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010342 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10343 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10344 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10345 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10346 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10347 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10348 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10349 ANDROID_CONTROL_AVAILABLE_MODES,
10350 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10351 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10352 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10353 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010354 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10355#ifndef USE_HAL_3_3
10356 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10357 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10358#endif
10359 };
10360
10361 Vector<int32_t> available_characteristics_keys;
10362 available_characteristics_keys.appendArray(characteristics_keys_basic,
10363 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10364#ifndef USE_HAL_3_3
10365 if (hasBlackRegions) {
10366 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10367 }
10368#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010369
10370 if (0 <= indexPD) {
10371 int32_t depthKeys[] = {
10372 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10373 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10374 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10375 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10376 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10377 };
10378 available_characteristics_keys.appendArray(depthKeys,
10379 sizeof(depthKeys) / sizeof(depthKeys[0]));
10380 }
10381
Thierry Strudel3d639192016-09-09 11:52:26 -070010382 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010383 available_characteristics_keys.array(),
10384 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010385
10386 /*available stall durations depend on the hw + sw and will be different for different devices */
10387 /*have to add for raw after implementation*/
10388 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10389 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10390
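    // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS is a flat list of
    // (format, width, height, stall duration in ns) 4-tuples; the loop below
    // emits one tuple per supported BLOB (JPEG) and RAW16 size.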
10391 Vector<int64_t> available_stall_durations;
10392 for (uint32_t j = 0; j < stall_formats_count; j++) {
10393 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10394 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10395 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10396 available_stall_durations.add(stall_formats[j]);
10397 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10398 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10399 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10400 }
10401 } else {
10402 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10403 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10404 available_stall_durations.add(stall_formats[j]);
10405 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10406 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10407 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10408 }
10409 }
10410 }
10411 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10412 available_stall_durations.array(),
10413 available_stall_durations.size());
10414
10415 //QCAMERA3_OPAQUE_RAW
10416 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10417 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10418 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10419 case LEGACY_RAW:
10420 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10421 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10422 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10423 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10424 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10425 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10426 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10427 break;
10428 case MIPI_RAW:
10429 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10430 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10431 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10432 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10433 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10434 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10435 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10436 break;
10437 default:
10438 LOGE("unknown opaque_raw_format %d",
10439 gCamCapability[cameraId]->opaque_raw_fmt);
10440 break;
10441 }
10442 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10443
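    // QCAMERA3_OPAQUE_RAW_STRIDES is filled as (width, height, stride) triplets,
    // one per supported opaque RAW dimension, with the stride taken from the
    // plane layout computed for the format selected above.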
10444 Vector<int32_t> strides;
10445 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10446 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10447 cam_stream_buf_plane_info_t buf_planes;
10448 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10449 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10450 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10451 &gCamCapability[cameraId]->padding_info, &buf_planes);
10452 strides.add(buf_planes.plane_info.mp[0].stride);
10453 }
10454 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10455 strides.size());
10456
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010457 //TBD: remove the following line once backend advertises zzHDR in feature mask
10458 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010459 //Video HDR default
10460 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10461 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010462 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010463 int32_t vhdr_mode[] = {
10464 QCAMERA3_VIDEO_HDR_MODE_OFF,
10465 QCAMERA3_VIDEO_HDR_MODE_ON};
10466
10467 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10468 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10469 vhdr_mode, vhdr_mode_count);
10470 }
10471
Thierry Strudel3d639192016-09-09 11:52:26 -070010472 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10473 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10474 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10475
10476 uint8_t isMonoOnly =
10477 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10478 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10479 &isMonoOnly, 1);
10480
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010481#ifndef USE_HAL_3_3
10482 Vector<int32_t> opaque_size;
10483 for (size_t j = 0; j < scalar_formats_count; j++) {
10484 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10485 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10486 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10487 cam_stream_buf_plane_info_t buf_planes;
10488
10489 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10490 &gCamCapability[cameraId]->padding_info, &buf_planes);
10491
10492 if (rc == 0) {
10493 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10494 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10495 opaque_size.add(buf_planes.plane_info.frame_len);
10496 } else {
10497 LOGE("raw frame calculation failed!");
10498 }
10499 }
10500 }
10501 }
10502
10503 if ((opaque_size.size() > 0) &&
10504 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10505 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10506 else
10507 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10508#endif
10509
Thierry Strudel04e026f2016-10-10 11:27:36 -070010510 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10511 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10512 size = 0;
10513 count = CAM_IR_MODE_MAX;
10514 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10515 for (size_t i = 0; i < count; i++) {
10516 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10517 gCamCapability[cameraId]->supported_ir_modes[i]);
10518 if (NAME_NOT_FOUND != val) {
10519 avail_ir_modes[size] = (int32_t)val;
10520 size++;
10521 }
10522 }
10523 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10524 avail_ir_modes, size);
10525 }
10526
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010527 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10528 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10529 size = 0;
10530 count = CAM_AEC_CONVERGENCE_MAX;
10531 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10532 for (size_t i = 0; i < count; i++) {
10533 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10534 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10535 if (NAME_NOT_FOUND != val) {
10536 available_instant_aec_modes[size] = (int32_t)val;
10537 size++;
10538 }
10539 }
10540 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10541 available_instant_aec_modes, size);
10542 }
10543
Thierry Strudel54dc9782017-02-15 12:12:10 -080010544 int32_t sharpness_range[] = {
10545 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10546 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10547 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10548
10549 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10550 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10551 size = 0;
10552 count = CAM_BINNING_CORRECTION_MODE_MAX;
10553 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10554 for (size_t i = 0; i < count; i++) {
10555 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10556 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10557 gCamCapability[cameraId]->supported_binning_modes[i]);
10558 if (NAME_NOT_FOUND != val) {
10559 avail_binning_modes[size] = (int32_t)val;
10560 size++;
10561 }
10562 }
10563 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10564 avail_binning_modes, size);
10565 }
10566
10567 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10568 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10569 size = 0;
10570 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10571 for (size_t i = 0; i < count; i++) {
10572 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10573 gCamCapability[cameraId]->supported_aec_modes[i]);
10574 if (NAME_NOT_FOUND != val)
10575 available_aec_modes[size++] = val;
10576 }
10577 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10578 available_aec_modes, size);
10579 }
10580
10581 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10582 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10583 size = 0;
10584 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10585 for (size_t i = 0; i < count; i++) {
10586 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10587 gCamCapability[cameraId]->supported_iso_modes[i]);
10588 if (NAME_NOT_FOUND != val)
10589 available_iso_modes[size++] = val;
10590 }
10591 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10592 available_iso_modes, size);
10593 }
10594
10595 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010596 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010597 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10598 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10599 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10600
10601 int32_t available_saturation_range[4];
10602 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10603 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10604 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10605 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10606 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10607 available_saturation_range, 4);
10608
10609 uint8_t is_hdr_values[2];
10610 is_hdr_values[0] = 0;
10611 is_hdr_values[1] = 1;
10612 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10613 is_hdr_values, 2);
10614
10615 float is_hdr_confidence_range[2];
10616 is_hdr_confidence_range[0] = 0.0;
10617 is_hdr_confidence_range[1] = 1.0;
10618 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10619 is_hdr_confidence_range, 2);
10620
Emilian Peev0a972ef2017-03-16 10:25:53 +000010621 size_t eepromLength = strnlen(
10622 reinterpret_cast<const char *>(
10623 gCamCapability[cameraId]->eeprom_version_info),
10624 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10625 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010626 char easelInfo[] = ",E:N";
10627 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10628 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10629 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010630 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10631 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010632 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010633 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010634 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10635 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10636 }
10637
Thierry Strudel3d639192016-09-09 11:52:26 -070010638 gStaticMetadata[cameraId] = staticInfo.release();
10639 return rc;
10640}
10641
10642/*===========================================================================
10643 * FUNCTION : makeTable
10644 *
10645 * DESCRIPTION: make a table of sizes
10646 *
10647 * PARAMETERS :
10648 *   @dimTable, @size     : source dimension table and its valid entry count
10649 *   @max_size, @sizeTable: max entries allowed and the output array of flattened width/height pairs
10650 *==========================================================================*/
10651void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10652 size_t max_size, int32_t *sizeTable)
10653{
10654 size_t j = 0;
10655 if (size > max_size) {
10656 size = max_size;
10657 }
10658 for (size_t i = 0; i < size; i++) {
10659 sizeTable[j] = dimTable[i].width;
10660 sizeTable[j+1] = dimTable[i].height;
10661 j+=2;
10662 }
10663}
10664
10665/*===========================================================================
10666 * FUNCTION : makeFPSTable
10667 *
10668 * DESCRIPTION: make a table of fps ranges
10669 *
10670 * PARAMETERS :
10671 *   @fpsTable, @size, @max_size, @fpsRangesTable : source fps range table, its entry count (clamped to max_size), and the output array of flattened (min_fps, max_fps) pairs
10672 *==========================================================================*/
10673void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10674 size_t max_size, int32_t *fpsRangesTable)
10675{
10676 size_t j = 0;
10677 if (size > max_size) {
10678 size = max_size;
10679 }
10680 for (size_t i = 0; i < size; i++) {
10681 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10682 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10683 j+=2;
10684 }
10685}
10686
10687/*===========================================================================
10688 * FUNCTION : makeOverridesList
10689 *
10690 * DESCRIPTION: make a list of scene mode overrides
10691 *
10692 * PARAMETERS :
10693 *   @overridesTable, @size, @max_size : scene mode override table from the daemon and its entry count
10694 *   @overridesList, @supported_indexes, @camera_id : output (ae, awb, af) triplets and the indexes of the fwk-supported scene modes
10695 *==========================================================================*/
10696void QCamera3HardwareInterface::makeOverridesList(
10697 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10698 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10699{
10700 /*daemon will give a list of overrides for all scene modes.
10701 However we should send the fwk only the overrides for the scene modes
10702 supported by the framework*/
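    /* Each supported scene mode contributes one (ae_mode, awb_mode, af_mode)
       triplet to overridesList, matching the layout of
       ANDROID_CONTROL_SCENE_MODE_OVERRIDES. */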
10703 size_t j = 0;
10704 if (size > max_size) {
10705 size = max_size;
10706 }
10707 size_t focus_count = CAM_FOCUS_MODE_MAX;
10708 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10709 focus_count);
10710 for (size_t i = 0; i < size; i++) {
10711 bool supt = false;
10712 size_t index = supported_indexes[i];
10713 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10714 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10715 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10716 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10717 overridesTable[index].awb_mode);
10718 if (NAME_NOT_FOUND != val) {
10719 overridesList[j+1] = (uint8_t)val;
10720 }
10721 uint8_t focus_override = overridesTable[index].af_mode;
10722 for (size_t k = 0; k < focus_count; k++) {
10723 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10724 supt = true;
10725 break;
10726 }
10727 }
10728 if (supt) {
10729 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10730 focus_override);
10731 if (NAME_NOT_FOUND != val) {
10732 overridesList[j+2] = (uint8_t)val;
10733 }
10734 } else {
10735 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10736 }
10737 j+=3;
10738 }
10739}
10740
10741/*===========================================================================
10742 * FUNCTION : filterJpegSizes
10743 *
10744 * DESCRIPTION: Returns the supported JPEG sizes: the processed sizes that are
10745 *              at least as large as the active array divided by the max downscale factor
10746 *
10747 * PARAMETERS :
10748 *   @jpegSizes (out), @processedSizes/@processedSizesCnt (in), @maxCount, @active_array_size, @downscale_factor : output JPEG size array, candidate processed sizes, and the active array / downscale limit used for filtering
10749 * RETURN : length of jpegSizes array
10750 *==========================================================================*/
10751
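// Example with hypothetical numbers: for a 4000x3000 active array and a
// downscale_factor of 2, only processed sizes of at least 2000x1500 are copied
// into jpegSizes; smaller entries are filtered out.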
10752size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10753 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10754 uint8_t downscale_factor)
10755{
10756 if (0 == downscale_factor) {
10757 downscale_factor = 1;
10758 }
10759
10760 int32_t min_width = active_array_size.width / downscale_factor;
10761 int32_t min_height = active_array_size.height / downscale_factor;
10762 size_t jpegSizesCnt = 0;
10763 if (processedSizesCnt > maxCount) {
10764 processedSizesCnt = maxCount;
10765 }
10766 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10767 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10768 jpegSizes[jpegSizesCnt] = processedSizes[i];
10769 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10770 jpegSizesCnt += 2;
10771 }
10772 }
10773 return jpegSizesCnt;
10774}
10775
10776/*===========================================================================
10777 * FUNCTION : computeNoiseModelEntryS
10778 *
10779 * DESCRIPTION: function to map a given sensitivity to the S noise
10780 * model parameters in the DNG noise model.
10781 *
10782 * PARAMETERS : sens : the sensor sensitivity
10783 *
10784 * RETURN    : S (sensor amplification) noise
10785 *
10786 *==========================================================================*/
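// The DNG / ANDROID_SENSOR_NOISE_PROFILE model approximates pixel noise
// variance as N(x) = S * x + O, where x is the signal level. gradient_S and
// offset_S express the S coefficient as a linear function of the requested
// sensitivity; the result is clamped to be non-negative.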
10787double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10788 double s = gCamCapability[mCameraId]->gradient_S * sens +
10789 gCamCapability[mCameraId]->offset_S;
10790 return ((s < 0.0) ? 0.0 : s);
10791}
10792
10793/*===========================================================================
10794 * FUNCTION : computeNoiseModelEntryO
10795 *
10796 * DESCRIPTION: function to map a given sensitivity to the O noise
10797 * model parameters in the DNG noise model.
10798 *
10799 * PARAMETERS : sens : the sensor sensitivity
10800 *
10801 ** RETURN : O (sensor readout) noise
10802 *
10803 *==========================================================================*/
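// Sketch of the intent, inferred from the formula below: noise variance scales
// with the square of applied gain, so the O (readout noise) term uses
// sens * sens for the gradient part and multiplies the offset by the square of
// any digital gain applied beyond the maximum analog sensitivity.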
10804double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10805 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10806 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10807 1.0 : (1.0 * sens / max_analog_sens);
10808 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10809 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10810 return ((o < 0.0) ? 0.0 : o);
10811}
10812
10813/*===========================================================================
10814 * FUNCTION : getSensorSensitivity
10815 *
10816 * DESCRIPTION: convert iso_mode to an integer value
10817 *
10818 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10819 *
10820 * RETURN    : sensitivity supported by sensor
10821 *
10822 *==========================================================================*/
10823int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10824{
10825 int32_t sensitivity;
10826
10827 switch (iso_mode) {
10828 case CAM_ISO_MODE_100:
10829 sensitivity = 100;
10830 break;
10831 case CAM_ISO_MODE_200:
10832 sensitivity = 200;
10833 break;
10834 case CAM_ISO_MODE_400:
10835 sensitivity = 400;
10836 break;
10837 case CAM_ISO_MODE_800:
10838 sensitivity = 800;
10839 break;
10840 case CAM_ISO_MODE_1600:
10841 sensitivity = 1600;
10842 break;
10843 default:
10844 sensitivity = -1;
10845 break;
10846 }
10847 return sensitivity;
10848}
10849
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010850int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010851 if (gEaselManagerClient == nullptr) {
10852 gEaselManagerClient = EaselManagerClient::create();
10853 if (gEaselManagerClient == nullptr) {
10854 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10855 return -ENODEV;
10856 }
10857 }
10858
10859 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010860 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10861 // to connect to Easel.
10862 bool doNotpowerOnEasel =
10863 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10864
10865 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010866 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10867 return OK;
10868 }
10869
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010870 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010871 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010872 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010873 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010874 return res;
10875 }
10876
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010877 EaselManagerClientOpened = true;
10878
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010879 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010880 if (res != OK) {
10881 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10882 }
10883
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010884 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010885 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010886
10887 // Expose enableZsl key only when HDR+ mode is enabled.
10888 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010889 }
10890
10891 return OK;
10892}
10893
Thierry Strudel3d639192016-09-09 11:52:26 -070010894/*===========================================================================
10895 * FUNCTION : getCamInfo
10896 *
10897 * DESCRIPTION: query camera capabilities
10898 *
10899 * PARAMETERS :
10900 * @cameraId : camera Id
10901 * @info : camera info struct to be filled in with camera capabilities
10902 *
10903 * RETURN : int type of status
10904 * NO_ERROR -- success
10905 * none-zero failure code
10906 *==========================================================================*/
10907int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10908 struct camera_info *info)
10909{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010910 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010911 int rc = 0;
10912
10913 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010914
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010915 {
10916 Mutex::Autolock l(gHdrPlusClientLock);
10917 rc = initHdrPlusClientLocked();
10918 if (rc != OK) {
10919 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10920 pthread_mutex_unlock(&gCamLock);
10921 return rc;
10922 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010923 }
10924
Thierry Strudel3d639192016-09-09 11:52:26 -070010925 if (NULL == gCamCapability[cameraId]) {
10926 rc = initCapabilities(cameraId);
10927 if (rc < 0) {
10928 pthread_mutex_unlock(&gCamLock);
10929 return rc;
10930 }
10931 }
10932
10933 if (NULL == gStaticMetadata[cameraId]) {
10934 rc = initStaticMetadata(cameraId);
10935 if (rc < 0) {
10936 pthread_mutex_unlock(&gCamLock);
10937 return rc;
10938 }
10939 }
10940
10941 switch(gCamCapability[cameraId]->position) {
10942 case CAM_POSITION_BACK:
10943 case CAM_POSITION_BACK_AUX:
10944 info->facing = CAMERA_FACING_BACK;
10945 break;
10946
10947 case CAM_POSITION_FRONT:
10948 case CAM_POSITION_FRONT_AUX:
10949 info->facing = CAMERA_FACING_FRONT;
10950 break;
10951
10952 default:
10953 LOGE("Unknown position type %d for camera id:%d",
10954 gCamCapability[cameraId]->position, cameraId);
10955 rc = -1;
10956 break;
10957 }
10958
10959
10960 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010961#ifndef USE_HAL_3_3
10962 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10963#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010964 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010965#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010966 info->static_camera_characteristics = gStaticMetadata[cameraId];
10967
10968 //For now assume both cameras can operate independently.
10969 info->conflicting_devices = NULL;
10970 info->conflicting_devices_length = 0;
10971
10972 //resource cost is 100 * MIN(1.0, m/M),
10973 //where m is throughput requirement with maximum stream configuration
10974 //and M is CPP maximum throughput.
10975 float max_fps = 0.0;
10976 for (uint32_t i = 0;
10977 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10978 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10979 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10980 }
10981 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10982 gCamCapability[cameraId]->active_array_size.width *
10983 gCamCapability[cameraId]->active_array_size.height * max_fps /
10984 gCamCapability[cameraId]->max_pixel_bandwidth;
10985 info->resource_cost = 100 * MIN(1.0, ratio);
10986 LOGI("camera %d resource cost is %d", cameraId,
10987 info->resource_cost);
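    // The reported cost saturates at 100 once the worst-case throughput
    // (MAX_PROCESSED_STREAMS full-active-array streams at the highest advertised
    // fps) reaches the CPP's max_pixel_bandwidth. Hypothetical example: if
    // MAX_PROCESSED_STREAMS were 2, two 4000x3000 streams at 30fps against a
    // bandwidth of 7.2e8 pixels/s would give ratio = 1.0 and a cost of 100.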
10988
10989 pthread_mutex_unlock(&gCamLock);
10990 return rc;
10991}
10992
10993/*===========================================================================
10994 * FUNCTION : translateCapabilityToMetadata
10995 *
10996 * DESCRIPTION: translate the capability into camera_metadata_t
10997 *
10998 * PARAMETERS : type of the request
10999 *
11000 *
11001 * RETURN : success: camera_metadata_t*
11002 * failure: NULL
11003 *
11004 *==========================================================================*/
11005camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11006{
11007 if (mDefaultMetadata[type] != NULL) {
11008 return mDefaultMetadata[type];
11009 }
11010 //first time we are handling this request
11011 //fill up the metadata structure using the wrapper class
11012 CameraMetadata settings;
11013 //translate from cam_capability_t to camera_metadata_tag_t
11014 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11015 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11016 int32_t defaultRequestID = 0;
11017 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11018
11019 /* OIS disable */
11020 char ois_prop[PROPERTY_VALUE_MAX];
11021 memset(ois_prop, 0, sizeof(ois_prop));
11022 property_get("persist.camera.ois.disable", ois_prop, "0");
11023 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11024
11025 /* Force video to use OIS */
11026 char videoOisProp[PROPERTY_VALUE_MAX];
11027 memset(videoOisProp, 0, sizeof(videoOisProp));
11028 property_get("persist.camera.ois.video", videoOisProp, "1");
11029 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011030
11031 // Hybrid AE enable/disable
11032 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11033 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11034 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11035 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
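    // These defaults can be overridden at runtime through Android system
    // properties, e.g. "adb shell setprop persist.camera.hybrid_ae.enable 1" or
    // "adb shell setprop persist.camera.ois.disable 1"; the new values are
    // picked up the next time the default request templates are constructed.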
11036
Thierry Strudel3d639192016-09-09 11:52:26 -070011037 uint8_t controlIntent = 0;
11038 uint8_t focusMode;
11039 uint8_t vsMode;
11040 uint8_t optStabMode;
11041 uint8_t cacMode;
11042 uint8_t edge_mode;
11043 uint8_t noise_red_mode;
11044 uint8_t tonemap_mode;
11045 bool highQualityModeEntryAvailable = FALSE;
11046 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011047 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011048 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11049 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011050 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011051 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011052 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011053
Thierry Strudel3d639192016-09-09 11:52:26 -070011054 switch (type) {
11055 case CAMERA3_TEMPLATE_PREVIEW:
11056 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11057 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11058 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11059 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11060 edge_mode = ANDROID_EDGE_MODE_FAST;
11061 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11062 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11063 break;
11064 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11065 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11066 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11067 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11068 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11069 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11070 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11071 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11072 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11073 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11074 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11075 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11076 highQualityModeEntryAvailable = TRUE;
11077 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11078 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11079 fastModeEntryAvailable = TRUE;
11080 }
11081 }
11082 if (highQualityModeEntryAvailable) {
11083 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11084 } else if (fastModeEntryAvailable) {
11085 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11086 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011087 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11088 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11089 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011090 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011091 break;
11092 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11093 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11094 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11095 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011096 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11097 edge_mode = ANDROID_EDGE_MODE_FAST;
11098 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11099 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11100 if (forceVideoOis)
11101 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11102 break;
11103 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11104 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11105 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11106 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011107 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11108 edge_mode = ANDROID_EDGE_MODE_FAST;
11109 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11110 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11111 if (forceVideoOis)
11112 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11113 break;
11114 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11115 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11116 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11117 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11118 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11119 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11120 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11121 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11122 break;
11123 case CAMERA3_TEMPLATE_MANUAL:
11124 edge_mode = ANDROID_EDGE_MODE_FAST;
11125 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11126 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11127 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11128 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11129 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11130 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11131 break;
11132 default:
11133 edge_mode = ANDROID_EDGE_MODE_FAST;
11134 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11135 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11136 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11137 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11138 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11139 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11140 break;
11141 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011142 // Set CAC to OFF if the underlying device doesn't support it
11143 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11144 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11145 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011146 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11147 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11148 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11149 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11150 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11151 }
11152 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011153 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011154 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011155
11156 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11157 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11158 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11159 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11160 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11161 || ois_disable)
11162 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11163 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011164 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011165
11166 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11167 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11168
11169 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11170 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11171
11172 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11173 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11174
11175 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11176 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11177
11178 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11179 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11180
11181 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11182 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11183
11184 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11185 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11186
11187 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11188 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11189
11190 /*flash*/
11191 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11192 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11193
11194 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11195 settings.update(ANDROID_FLASH_FIRING_POWER,
11196 &flashFiringLevel, 1);
11197
11198 /* lens */
11199 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11200 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11201
11202 if (gCamCapability[mCameraId]->filter_densities_count) {
11203 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11204 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11205 gCamCapability[mCameraId]->filter_densities_count);
11206 }
11207
11208 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11209 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11210
Thierry Strudel3d639192016-09-09 11:52:26 -070011211 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11212 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11213
11214 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11215 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11216
11217 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11218 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11219
11220 /* face detection (default to OFF) */
11221 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11222 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11223
Thierry Strudel54dc9782017-02-15 12:12:10 -080011224 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11225 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011226
11227 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11228 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11229
11230 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11231 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11232
Thierry Strudel3d639192016-09-09 11:52:26 -070011233
11234 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11235 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11236
11237 /* Exposure time(Update the Min Exposure Time)*/
11238 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11239 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11240
11241 /* frame duration */
11242 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11243 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11244
11245 /* sensitivity */
11246 static const int32_t default_sensitivity = 100;
11247 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011248#ifndef USE_HAL_3_3
11249 static const int32_t default_isp_sensitivity =
11250 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11251 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11252#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011253
11254 /*edge mode*/
11255 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11256
11257 /*noise reduction mode*/
11258 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11259
11260 /*color correction mode*/
11261 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11262 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11263
11264 /*tonemap mode*/
11265 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11266
11267 int32_t scaler_crop_region[4];
11268 scaler_crop_region[0] = 0;
11269 scaler_crop_region[1] = 0;
11270 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11271 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11272 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11273
11274 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11275 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11276
11277 /*focus distance*/
11278 float focus_distance = 0.0;
11279 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11280
11281 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011282 /* Restrict template max_fps to 30 */
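    /* Illustrative example (hypothetical ranges): if [15,30] and [30,30] are
       advertised, preview/still/ZSL templates pick the widest range [15,30],
       while video templates pick the highest fixed range [30,30]; any range
       whose max exceeds TEMPLATE_MAX_PREVIEW_FPS is skipped entirely. */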
Thierry Strudel3d639192016-09-09 11:52:26 -070011283 float max_range = 0.0;
11284 float max_fixed_fps = 0.0;
11285 int32_t fps_range[2] = {0, 0};
11286 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11287 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011288 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11289 TEMPLATE_MAX_PREVIEW_FPS) {
11290 continue;
11291 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011292 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11293 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11294 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11295 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11296 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11297 if (range > max_range) {
11298 fps_range[0] =
11299 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11300 fps_range[1] =
11301 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11302 max_range = range;
11303 }
11304 } else {
11305 if (range < 0.01 && max_fixed_fps <
11306 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11307 fps_range[0] =
11308 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11309 fps_range[1] =
11310 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11311 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11312 }
11313 }
11314 }
11315 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11316
11317 /*precapture trigger*/
11318 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11319 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11320
11321 /*af trigger*/
11322 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11323 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11324
11325 /* ae & af regions */
11326 int32_t active_region[] = {
11327 gCamCapability[mCameraId]->active_array_size.left,
11328 gCamCapability[mCameraId]->active_array_size.top,
11329 gCamCapability[mCameraId]->active_array_size.left +
11330 gCamCapability[mCameraId]->active_array_size.width,
11331 gCamCapability[mCameraId]->active_array_size.top +
11332 gCamCapability[mCameraId]->active_array_size.height,
11333 0};
11334 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11335 sizeof(active_region) / sizeof(active_region[0]));
11336 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11337 sizeof(active_region) / sizeof(active_region[0]));
11338
11339 /* black level lock */
11340 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11341 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11342
Thierry Strudel3d639192016-09-09 11:52:26 -070011343 //special defaults for manual template
11344 if (type == CAMERA3_TEMPLATE_MANUAL) {
11345 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11346 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11347
11348 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11349 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11350
11351 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11352 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11353
11354 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11355 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11356
11357 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11358 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11359
11360 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11361 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11362 }
11363
11364
11365 /* TNR
11366 * This is where we decide for which templates TNR is enabled by default.
11367 * TNR is turned on if either the preview or the video stream requires it.
11368 * This is not to be confused with per-stream linking; that decision is still
11369 * made per session and is handled as part of stream configuration.
11370 */
11371 uint8_t tnr_enable = 0;
11372
11373 if (m_bTnrPreview || m_bTnrVideo) {
11374
11375 switch (type) {
11376 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11377 tnr_enable = 1;
11378 break;
11379
11380 default:
11381 tnr_enable = 0;
11382 break;
11383 }
11384
11385 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11386 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11387 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11388
11389 LOGD("TNR:%d with process plate %d for template:%d",
11390 tnr_enable, tnr_process_type, type);
11391 }
11392
11393 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011394 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011395 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11396
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011397 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011398 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11399
Shuzhen Wang920ea402017-05-03 08:49:39 -070011400 uint8_t related_camera_id = mCameraId;
11401 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011402
11403 /* CDS default */
11404 char prop[PROPERTY_VALUE_MAX];
11405 memset(prop, 0, sizeof(prop));
11406 property_get("persist.camera.CDS", prop, "Auto");
11407 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11408 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11409 if (CAM_CDS_MODE_MAX == cds_mode) {
11410 cds_mode = CAM_CDS_MODE_AUTO;
11411 }
11412
11413 /* Disabling CDS in templates which have TNR enabled*/
11414 if (tnr_enable)
11415 cds_mode = CAM_CDS_MODE_OFF;
11416
11417 int32_t mode = cds_mode;
11418 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011419
Thierry Strudel269c81a2016-10-12 12:13:59 -070011420 /* Manual Convergence AEC Speed is disabled by default*/
11421 float default_aec_speed = 0;
11422 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11423
11424 /* Manual Convergence AWB Speed is disabled by default*/
11425 float default_awb_speed = 0;
11426 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11427
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011428 // Set instant AEC to normal convergence by default
11429 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11430 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11431
Shuzhen Wang19463d72016-03-08 11:09:52 -080011432 /* hybrid ae */
11433 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11434
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011435 if (gExposeEnableZslKey) {
11436 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11437 }
11438
Thierry Strudel3d639192016-09-09 11:52:26 -070011439 mDefaultMetadata[type] = settings.release();
11440
11441 return mDefaultMetadata[type];
11442}
11443
11444/*===========================================================================
11445 * FUNCTION : setFrameParameters
11446 *
11447 * DESCRIPTION: set parameters per frame as requested in the metadata from
11448 * framework
11449 *
11450 * PARAMETERS :
11451 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011452 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011453 * @blob_request: Whether this request is a blob request or not
11454 *
11455 * RETURN : success: NO_ERROR
11456 * failure: non-zero error code (e.g. BAD_VALUE)
11457 *==========================================================================*/
11458int QCamera3HardwareInterface::setFrameParameters(
11459 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011460 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011461 int blob_request,
11462 uint32_t snapshotStreamId)
11463{
11464 /*translate from camera_metadata_t type to parm_type_t*/
11465 int rc = 0;
11466 int32_t hal_version = CAM_HAL_V3;
11467
11468 clear_metadata_buffer(mParameters);
11469 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11470 LOGE("Failed to set hal version in the parameters");
11471 return BAD_VALUE;
11472 }
11473
11474 /*we need to update the frame number in the parameters*/
11475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11476 request->frame_number)) {
11477 LOGE("Failed to set the frame number in the parameters");
11478 return BAD_VALUE;
11479 }
11480
11481 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011482 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011483 LOGE("Failed to set stream type mask in the parameters");
11484 return BAD_VALUE;
11485 }
11486
11487 if (mUpdateDebugLevel) {
11488 uint32_t dummyDebugLevel = 0;
11489 /* The value of dummyDebugLevel is irrelevant. On
11490 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11491 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11492 dummyDebugLevel)) {
11493 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11494 return BAD_VALUE;
11495 }
11496 mUpdateDebugLevel = false;
11497 }
11498
11499 if(request->settings != NULL){
11500 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11501 if (blob_request)
11502 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11503 }
11504
11505 return rc;
11506}
11507
11508/*===========================================================================
11509 * FUNCTION : setReprocParameters
11510 *
11511 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11512 * return it.
11513 *
11514 * PARAMETERS :
11515 * @request : request that needs to be serviced
11516 *
11517 * RETURN : success: NO_ERROR
11518 * failure:
11519 *==========================================================================*/
11520int32_t QCamera3HardwareInterface::setReprocParameters(
11521 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11522 uint32_t snapshotStreamId)
11523{
11524 /*translate from camera_metadata_t type to parm_type_t*/
11525 int rc = 0;
11526
11527 if (NULL == request->settings){
11528 LOGE("Reprocess settings cannot be NULL");
11529 return BAD_VALUE;
11530 }
11531
11532 if (NULL == reprocParam) {
11533 LOGE("Invalid reprocessing metadata buffer");
11534 return BAD_VALUE;
11535 }
11536 clear_metadata_buffer(reprocParam);
11537
11538 /*we need to update the frame number in the parameters*/
11539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11540 request->frame_number)) {
11541 LOGE("Failed to set the frame number in the parameters");
11542 return BAD_VALUE;
11543 }
11544
11545 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11546 if (rc < 0) {
11547 LOGE("Failed to translate reproc request");
11548 return rc;
11549 }
11550
11551 CameraMetadata frame_settings;
11552 frame_settings = request->settings;
11553 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11554 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11555 int32_t *crop_count =
11556 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11557 int32_t *crop_data =
11558 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11559 int32_t *roi_map =
11560 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11561 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11562 cam_crop_data_t crop_meta;
11563 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11564 crop_meta.num_of_streams = 1;
11565 crop_meta.crop_info[0].crop.left = crop_data[0];
11566 crop_meta.crop_info[0].crop.top = crop_data[1];
11567 crop_meta.crop_info[0].crop.width = crop_data[2];
11568 crop_meta.crop_info[0].crop.height = crop_data[3];
11569
11570 crop_meta.crop_info[0].roi_map.left =
11571 roi_map[0];
11572 crop_meta.crop_info[0].roi_map.top =
11573 roi_map[1];
11574 crop_meta.crop_info[0].roi_map.width =
11575 roi_map[2];
11576 crop_meta.crop_info[0].roi_map.height =
11577 roi_map[3];
11578
11579 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11580 rc = BAD_VALUE;
11581 }
11582 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11583 request->input_buffer->stream,
11584 crop_meta.crop_info[0].crop.left,
11585 crop_meta.crop_info[0].crop.top,
11586 crop_meta.crop_info[0].crop.width,
11587 crop_meta.crop_info[0].crop.height);
11588 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11589 request->input_buffer->stream,
11590 crop_meta.crop_info[0].roi_map.left,
11591 crop_meta.crop_info[0].roi_map.top,
11592 crop_meta.crop_info[0].roi_map.width,
11593 crop_meta.crop_info[0].roi_map.height);
11594 } else {
11595 LOGE("Invalid reprocess crop count %d!", *crop_count);
11596 }
11597 } else {
11598 LOGE("No crop data from matching output stream");
11599 }
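        /* Illustration only: a sketch of how the reprocess crop vendor tags
         * consumed above could be populated on the framework side. The
         * 4-element packing (left, top, width, height) for both the crop rect
         * and its ROI map follows the parsing code above; the concrete sizes
         * are made-up examples.
         *
         *   int32_t cropCount = 1;
         *   int32_t cropRect[4] = {0, 0, 3264, 2448};   // left, top, width, height
         *   int32_t roiMap[4]   = {0, 0, 4032, 3024};   // ROI in the source buffer
         *   settings.update(QCAMERA3_CROP_COUNT_REPROCESS, &cropCount, 1);
         *   settings.update(QCAMERA3_CROP_REPROCESS, cropRect, 4);
         *   settings.update(QCAMERA3_CROP_ROI_MAP_REPROCESS, roiMap, 4);
         *
         * Only the first crop entry is consumed here (num_of_streams is forced
         * to 1), so crop counts other than 1 are accepted but ignored beyond
         * index 0.
         */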
11600
11601 /* These settings are not needed for regular requests so handle them specially for
11602 reprocess requests; information needed for EXIF tags */
11603 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11604 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11605 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11606 if (NAME_NOT_FOUND != val) {
11607 uint32_t flashMode = (uint32_t)val;
11608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11609 rc = BAD_VALUE;
11610 }
11611 } else {
11612 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11613 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11614 }
11615 } else {
11616 LOGH("No flash mode in reprocess settings");
11617 }
11618
11619 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11620 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11622 rc = BAD_VALUE;
11623 }
11624 } else {
11625 LOGH("No flash state in reprocess settings");
11626 }
11627
11628 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11629 uint8_t *reprocessFlags =
11630 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11631 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11632 *reprocessFlags)) {
11633 rc = BAD_VALUE;
11634 }
11635 }
11636
Thierry Strudel54dc9782017-02-15 12:12:10 -080011637 // Add exif debug data to internal metadata
11638 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11639 mm_jpeg_debug_exif_params_t *debug_params =
11640 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11641 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11642 // AE
11643 if (debug_params->ae_debug_params_valid == TRUE) {
11644 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11645 debug_params->ae_debug_params);
11646 }
11647 // AWB
11648 if (debug_params->awb_debug_params_valid == TRUE) {
11649 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11650 debug_params->awb_debug_params);
11651 }
11652 // AF
11653 if (debug_params->af_debug_params_valid == TRUE) {
11654 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11655 debug_params->af_debug_params);
11656 }
11657 // ASD
11658 if (debug_params->asd_debug_params_valid == TRUE) {
11659 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11660 debug_params->asd_debug_params);
11661 }
11662 // Stats
11663 if (debug_params->stats_debug_params_valid == TRUE) {
11664 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11665 debug_params->stats_debug_params);
11666 }
11667 // BE Stats
11668 if (debug_params->bestats_debug_params_valid == TRUE) {
11669 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11670 debug_params->bestats_debug_params);
11671 }
11672 // BHIST
11673 if (debug_params->bhist_debug_params_valid == TRUE) {
11674 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11675 debug_params->bhist_debug_params);
11676 }
11677 // 3A Tuning
11678 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11679 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11680 debug_params->q3a_tuning_debug_params);
11681 }
11682 }
11683
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011684 // Add metadata which reprocess needs
11685 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11686 cam_reprocess_info_t *repro_info =
11687 (cam_reprocess_info_t *)frame_settings.find
11688 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011689 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011690 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011691 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011692 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011693 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011694 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011695 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011696 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011697 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011698 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011699 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011700 repro_info->pipeline_flip);
11701 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11702 repro_info->af_roi);
11703 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11704 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011705        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11706           CAM_INTF_PARM_ROTATION metadata has already been added in
11707           translateToHalMetadata, and the HAL needs to keep this new rotation
11708           metadata. Otherwise, the old rotation info saved in the vendor tag
11709           would be used */
11710 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11711 CAM_INTF_PARM_ROTATION, reprocParam) {
11712 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11713 } else {
11714 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011715 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011716 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011717 }
11718
11719    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11720       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11721       roi.width and roi.height would be the final JPEG size.
11722       For now, the HAL only checks this for reprocess requests */
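    /* Illustration only: a sketch of how an application (or the HAL's own
     * reprocess path) might request the JPEG encode crop/scale described above.
     * The tag semantics follow the parsing code below; the sizes are examples.
     *
     *   uint8_t cropEnable = TRUE;
     *   int32_t cropRect[4] = {400, 300, 3264, 2448};  // left, top, width, height
     *   int32_t scaleRoi[4] = {0, 0, 1920, 1080};      // roi[2]/roi[3] = final JPEG size
     *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &cropEnable, 1);
     *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
     *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, scaleRoi, 4);
     *
     * With the above, the encoder would crop to 3264x2448 and scale the result
     * to a 1920x1080 JPEG, since only roi[2] and roi[3] are read for the scale.
     */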
11723 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11724 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11725 uint8_t *enable =
11726 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11727 if (*enable == TRUE) {
11728 int32_t *crop_data =
11729 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11730 cam_stream_crop_info_t crop_meta;
11731 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11732 crop_meta.stream_id = 0;
11733 crop_meta.crop.left = crop_data[0];
11734 crop_meta.crop.top = crop_data[1];
11735 crop_meta.crop.width = crop_data[2];
11736 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011737 // The JPEG crop roi should match cpp output size
11738 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11739 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11740 crop_meta.roi_map.left = 0;
11741 crop_meta.roi_map.top = 0;
11742 crop_meta.roi_map.width = cpp_crop->crop.width;
11743 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011744 }
11745 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11746 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011747 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011748 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011749 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11750 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011751 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011752 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11753
11754 // Add JPEG scale information
11755 cam_dimension_t scale_dim;
11756 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11757 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11758 int32_t *roi =
11759 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11760 scale_dim.width = roi[2];
11761 scale_dim.height = roi[3];
11762 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11763 scale_dim);
11764 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11765 scale_dim.width, scale_dim.height, mCameraId);
11766 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011767 }
11768 }
11769
11770 return rc;
11771}
11772
11773/*===========================================================================
11774 * FUNCTION : saveRequestSettings
11775 *
11776 * DESCRIPTION: Add any settings that might have changed to the request settings
11777 * and save the settings to be applied on the frame
11778 *
11779 * PARAMETERS :
11780 * @jpegMetadata : the extracted and/or modified jpeg metadata
11781 * @request : request with initial settings
11782 *
11783 * RETURN :
11784 * camera_metadata_t* : pointer to the saved request settings
11785 *==========================================================================*/
11786camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11787 const CameraMetadata &jpegMetadata,
11788 camera3_capture_request_t *request)
11789{
11790 camera_metadata_t *resultMetadata;
11791 CameraMetadata camMetadata;
11792 camMetadata = request->settings;
11793
11794 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11795 int32_t thumbnail_size[2];
11796 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11797 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11798 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11799 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11800 }
11801
11802 if (request->input_buffer != NULL) {
11803 uint8_t reprocessFlags = 1;
11804 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11805 (uint8_t*)&reprocessFlags,
11806 sizeof(reprocessFlags));
11807 }
11808
11809 resultMetadata = camMetadata.release();
11810 return resultMetadata;
11811}
11812
11813/*===========================================================================
11814 * FUNCTION : setHalFpsRange
11815 *
11816 * DESCRIPTION: set FPS range parameter
11817 *
11818 *
11819 * PARAMETERS :
11820 * @settings : Metadata from framework
11821 * @hal_metadata: Metadata buffer
11822 *
11823 *
11824 * RETURN : success: NO_ERROR
11825 * failure:
11826 *==========================================================================*/
11827int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11828 metadata_buffer_t *hal_metadata)
11829{
11830 int32_t rc = NO_ERROR;
11831 cam_fps_range_t fps_range;
11832 fps_range.min_fps = (float)
11833 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11834 fps_range.max_fps = (float)
11835 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11836 fps_range.video_min_fps = fps_range.min_fps;
11837 fps_range.video_max_fps = fps_range.max_fps;
11838
11839 LOGD("aeTargetFpsRange fps: [%f %f]",
11840 fps_range.min_fps, fps_range.max_fps);
11841 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11842 * follows:
11843 * ---------------------------------------------------------------|
11844 * Video stream is absent in configure_streams |
11845 * (Camcorder preview before the first video record |
11846 * ---------------------------------------------------------------|
11847 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11848 * | | | vid_min/max_fps|
11849 * ---------------------------------------------------------------|
11850 * NO | [ 30, 240] | 240 | [240, 240] |
11851 * |-------------|-------------|----------------|
11852 * | [240, 240] | 240 | [240, 240] |
11853 * ---------------------------------------------------------------|
11854 * Video stream is present in configure_streams |
11855 * ---------------------------------------------------------------|
11856 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11857 * | | | vid_min/max_fps|
11858 * ---------------------------------------------------------------|
11859 * NO | [ 30, 240] | 240 | [240, 240] |
11860 * (camcorder prev |-------------|-------------|----------------|
11861 * after video rec | [240, 240] | 240 | [240, 240] |
11862 * is stopped) | | | |
11863 * ---------------------------------------------------------------|
11864 * YES | [ 30, 240] | 240 | [240, 240] |
11865 * |-------------|-------------|----------------|
11866 * | [240, 240] | 240 | [240, 240] |
11867 * ---------------------------------------------------------------|
11868 * When Video stream is absent in configure_streams,
11869 * preview fps = sensor_fps / batchsize
11870 * Eg: for 240fps at batchSize 4, preview = 60fps
11871 * for 120fps at batchSize 4, preview = 30fps
11872 *
11873 * When video stream is present in configure_streams, preview fps is as per
11874 * the ratio of preview buffers to video buffers requested in process
11875 * capture request
11876 */
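    /* Worked example of the batch-size math used below, assuming
     * PREVIEW_FPS_FOR_HFR is 30fps (the actual value is defined elsewhere):
     *
     *   aeTargetFpsRange = [240, 240]  =>  mHFRVideoFps = 240
     *   mBatchSize = 240 / 30 = 8 (then clamped to MAX_HFR_BATCH_SIZE)
     *
     * With no video stream configured, preview fps = sensor fps / batch size,
     * so a batch size of 4 at 240fps gives 60fps preview and at 120fps gives
     * 30fps preview, matching the table above.
     */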
11877 mBatchSize = 0;
11878 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11879 fps_range.min_fps = fps_range.video_max_fps;
11880 fps_range.video_min_fps = fps_range.video_max_fps;
11881 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11882 fps_range.max_fps);
11883 if (NAME_NOT_FOUND != val) {
11884 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11885 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11886 return BAD_VALUE;
11887 }
11888
11889 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11890 /* If batchmode is currently in progress and the fps changes,
11891 * set the flag to restart the sensor */
11892 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11893 (mHFRVideoFps != fps_range.max_fps)) {
11894 mNeedSensorRestart = true;
11895 }
11896 mHFRVideoFps = fps_range.max_fps;
11897 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11898 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11899 mBatchSize = MAX_HFR_BATCH_SIZE;
11900 }
11901 }
11902 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11903
11904 }
11905 } else {
11906 /* HFR mode is session param in backend/ISP. This should be reset when
11907 * in non-HFR mode */
11908 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11910 return BAD_VALUE;
11911 }
11912 }
11913 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11914 return BAD_VALUE;
11915 }
11916 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11917 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11918 return rc;
11919}
11920
11921/*===========================================================================
11922 * FUNCTION : translateToHalMetadata
11923 *
11924 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11925 *
11926 *
11927 * PARAMETERS :
11928 * @request : request sent from framework
11929 *
11930 *
11931 * RETURN : success: NO_ERROR
11932 * failure:
11933 *==========================================================================*/
11934int QCamera3HardwareInterface::translateToHalMetadata
11935 (const camera3_capture_request_t *request,
11936 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011937 uint32_t snapshotStreamId) {
11938 if (request == nullptr || hal_metadata == nullptr) {
11939 return BAD_VALUE;
11940 }
11941
11942 int64_t minFrameDuration = getMinFrameDuration(request);
11943
11944 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11945 minFrameDuration);
11946}
11947
11948int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11949 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11950 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11951
Thierry Strudel3d639192016-09-09 11:52:26 -070011952 int rc = 0;
11953 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011954 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011955
11956 /* Do not change the order of the following list unless you know what you are
11957 * doing.
11958 * The order is laid out in such a way that parameters in the front of the table
11959 * may be used to override the parameters later in the table. Examples are:
11960 * 1. META_MODE should precede AEC/AWB/AF MODE
11961     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11962 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11963     * 4. Any mode should precede its corresponding settings
11964 */
11965 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11966 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11967 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11968 rc = BAD_VALUE;
11969 }
11970 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11971 if (rc != NO_ERROR) {
11972 LOGE("extractSceneMode failed");
11973 }
11974 }
11975
11976 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11977 uint8_t fwk_aeMode =
11978 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11979 uint8_t aeMode;
11980 int32_t redeye;
11981
11982 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11983 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011984 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11985 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011986 } else {
11987 aeMode = CAM_AE_MODE_ON;
11988 }
11989 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11990 redeye = 1;
11991 } else {
11992 redeye = 0;
11993 }
11994
11995 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11996 fwk_aeMode);
11997 if (NAME_NOT_FOUND != val) {
11998 int32_t flashMode = (int32_t)val;
11999 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12000 }
12001
12002 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12003 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12004 rc = BAD_VALUE;
12005 }
12006 }
12007
12008 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12009 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12010 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12011 fwk_whiteLevel);
12012 if (NAME_NOT_FOUND != val) {
12013 uint8_t whiteLevel = (uint8_t)val;
12014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12015 rc = BAD_VALUE;
12016 }
12017 }
12018 }
12019
12020 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12021 uint8_t fwk_cacMode =
12022 frame_settings.find(
12023 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12024 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12025 fwk_cacMode);
12026 if (NAME_NOT_FOUND != val) {
12027 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12028 bool entryAvailable = FALSE;
12029 // Check whether Frameworks set CAC mode is supported in device or not
12030 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12031 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12032 entryAvailable = TRUE;
12033 break;
12034 }
12035 }
12036 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12037            // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
12038 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12039 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12040 if (entryAvailable == FALSE) {
12041 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12042 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12043 } else {
12044 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12045                        // High is not supported, so set FAST since the spec says the underlying
12046 // device implementation can be the same for both modes.
12047 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12048 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12049 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12050 // in order to avoid the fps drop due to high quality
12051 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12052 } else {
12053 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12054 }
12055 }
12056 }
12057 LOGD("Final cacMode is %d", cacMode);
12058 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12059 rc = BAD_VALUE;
12060 }
12061 } else {
12062 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12063 }
12064 }
12065
Jason Lee84ae9972017-02-24 13:24:24 -080012066 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012067 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012068 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012069 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012070 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12071 fwk_focusMode);
12072 if (NAME_NOT_FOUND != val) {
12073 uint8_t focusMode = (uint8_t)val;
12074 LOGD("set focus mode %d", focusMode);
12075 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12076 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12077 rc = BAD_VALUE;
12078 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012079 }
12080 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012081 } else {
12082 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12083 LOGE("Focus forced to infinity %d", focusMode);
12084 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12085 rc = BAD_VALUE;
12086 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012087 }
12088
Jason Lee84ae9972017-02-24 13:24:24 -080012089 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12090 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012091 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12092 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12093 focalDistance)) {
12094 rc = BAD_VALUE;
12095 }
12096 }
12097
12098 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12099 uint8_t fwk_antibandingMode =
12100 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12101 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12102 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12103 if (NAME_NOT_FOUND != val) {
12104 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012105 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12106 if (m60HzZone) {
12107 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12108 } else {
12109 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12110 }
12111 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012112 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12113 hal_antibandingMode)) {
12114 rc = BAD_VALUE;
12115 }
12116 }
12117 }
12118
12119 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12120 int32_t expCompensation = frame_settings.find(
12121 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12122 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12123 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12124 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12125 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012126 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12128 expCompensation)) {
12129 rc = BAD_VALUE;
12130 }
12131 }
12132
12133 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12134 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12136 rc = BAD_VALUE;
12137 }
12138 }
12139 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12140 rc = setHalFpsRange(frame_settings, hal_metadata);
12141 if (rc != NO_ERROR) {
12142 LOGE("setHalFpsRange failed");
12143 }
12144 }
12145
12146 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12147 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12148 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12149 rc = BAD_VALUE;
12150 }
12151 }
12152
12153 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12154 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12155 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12156 fwk_effectMode);
12157 if (NAME_NOT_FOUND != val) {
12158 uint8_t effectMode = (uint8_t)val;
12159 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12160 rc = BAD_VALUE;
12161 }
12162 }
12163 }
12164
12165 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12166 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12167 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12168 colorCorrectMode)) {
12169 rc = BAD_VALUE;
12170 }
12171 }
12172
12173 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12174 cam_color_correct_gains_t colorCorrectGains;
12175 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12176 colorCorrectGains.gains[i] =
12177 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12178 }
12179 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12180 colorCorrectGains)) {
12181 rc = BAD_VALUE;
12182 }
12183 }
12184
12185 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12186 cam_color_correct_matrix_t colorCorrectTransform;
12187 cam_rational_type_t transform_elem;
12188 size_t num = 0;
12189 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12190 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12191 transform_elem.numerator =
12192 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12193 transform_elem.denominator =
12194 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12195 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12196 num++;
12197 }
12198 }
12199 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12200 colorCorrectTransform)) {
12201 rc = BAD_VALUE;
12202 }
12203 }
12204
12205 cam_trigger_t aecTrigger;
12206 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12207 aecTrigger.trigger_id = -1;
12208 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12209 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12210 aecTrigger.trigger =
12211 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12212 aecTrigger.trigger_id =
12213 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12214 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12215 aecTrigger)) {
12216 rc = BAD_VALUE;
12217 }
12218 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12219 aecTrigger.trigger, aecTrigger.trigger_id);
12220 }
12221
12222 /*af_trigger must come with a trigger id*/
12223 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12224 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12225 cam_trigger_t af_trigger;
12226 af_trigger.trigger =
12227 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12228 af_trigger.trigger_id =
12229 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12231 rc = BAD_VALUE;
12232 }
12233 LOGD("AfTrigger: %d AfTriggerID: %d",
12234 af_trigger.trigger, af_trigger.trigger_id);
12235 }
12236
12237 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12238 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12240 rc = BAD_VALUE;
12241 }
12242 }
12243 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12244 cam_edge_application_t edge_application;
12245 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012246
Thierry Strudel3d639192016-09-09 11:52:26 -070012247 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12248 edge_application.sharpness = 0;
12249 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012250 edge_application.sharpness =
12251 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12252 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12253 int32_t sharpness =
12254 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12255 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12256 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12257 LOGD("Setting edge mode sharpness %d", sharpness);
12258 edge_application.sharpness = sharpness;
12259 }
12260 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012261 }
12262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12263 rc = BAD_VALUE;
12264 }
12265 }
12266
12267 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12268 int32_t respectFlashMode = 1;
12269 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12270 uint8_t fwk_aeMode =
12271 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012272 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12273 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12274 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012275 respectFlashMode = 0;
12276 LOGH("AE Mode controls flash, ignore android.flash.mode");
12277 }
12278 }
12279 if (respectFlashMode) {
12280 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12281 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12282 LOGH("flash mode after mapping %d", val);
12283 // To check: CAM_INTF_META_FLASH_MODE usage
12284 if (NAME_NOT_FOUND != val) {
12285 uint8_t flashMode = (uint8_t)val;
12286 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12287 rc = BAD_VALUE;
12288 }
12289 }
12290 }
12291 }
12292
12293 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12294 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12295 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12296 rc = BAD_VALUE;
12297 }
12298 }
12299
12300 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12301 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12302 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12303 flashFiringTime)) {
12304 rc = BAD_VALUE;
12305 }
12306 }
12307
12308 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12309 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12310 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12311 hotPixelMode)) {
12312 rc = BAD_VALUE;
12313 }
12314 }
12315
12316 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12317 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12318 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12319 lensAperture)) {
12320 rc = BAD_VALUE;
12321 }
12322 }
12323
12324 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12325 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12326 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12327 filterDensity)) {
12328 rc = BAD_VALUE;
12329 }
12330 }
12331
12332 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12333 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12335 focalLength)) {
12336 rc = BAD_VALUE;
12337 }
12338 }
12339
12340 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12341 uint8_t optStabMode =
12342 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12343 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12344 optStabMode)) {
12345 rc = BAD_VALUE;
12346 }
12347 }
12348
12349 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12350 uint8_t videoStabMode =
12351 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12352 LOGD("videoStabMode from APP = %d", videoStabMode);
12353 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12354 videoStabMode)) {
12355 rc = BAD_VALUE;
12356 }
12357 }
12358
12359
12360 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12361 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12362 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12363 noiseRedMode)) {
12364 rc = BAD_VALUE;
12365 }
12366 }
12367
12368 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12369 float reprocessEffectiveExposureFactor =
12370 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12371 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12372 reprocessEffectiveExposureFactor)) {
12373 rc = BAD_VALUE;
12374 }
12375 }
12376
12377 cam_crop_region_t scalerCropRegion;
12378 bool scalerCropSet = false;
12379 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12380 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12381 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12382 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12383 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12384
12385 // Map coordinate system from active array to sensor output.
12386 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12387 scalerCropRegion.width, scalerCropRegion.height);
12388
12389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12390 scalerCropRegion)) {
12391 rc = BAD_VALUE;
12392 }
12393 scalerCropSet = true;
12394 }
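    /* Illustration only: the active-array to sensor-output mapping performed by
     * mCropRegionMapper.toSensor() is, in effect, a rescale between the two
     * coordinate spaces. A simplified sketch (the real implementation also
     * clamps to the sensor bounds and lives in QCamera3CropRegionMapper):
     *
     *   left   = left   * sensor_w / active_array_w;
     *   top    = top    * sensor_h / active_array_h;
     *   width  = width  * sensor_w / active_array_w;
     *   height = height * sensor_h / active_array_h;
     *
     * The same mapping is applied to the AE/AF regions further below so that
     * all ROIs reach the backend in sensor-output coordinates.
     */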
12395
12396 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12397 int64_t sensorExpTime =
12398 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12399 LOGD("setting sensorExpTime %lld", sensorExpTime);
12400 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12401 sensorExpTime)) {
12402 rc = BAD_VALUE;
12403 }
12404 }
12405
12406 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12407 int64_t sensorFrameDuration =
12408 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012409 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12410 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12411 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12412 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12413 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12414 sensorFrameDuration)) {
12415 rc = BAD_VALUE;
12416 }
12417 }
12418
12419 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12420 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12421 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12422 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12423 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12424 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12425 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12427 sensorSensitivity)) {
12428 rc = BAD_VALUE;
12429 }
12430 }
12431
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012432#ifndef USE_HAL_3_3
12433 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12434 int32_t ispSensitivity =
12435 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12436 if (ispSensitivity <
12437 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12438 ispSensitivity =
12439 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12440 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12441 }
12442 if (ispSensitivity >
12443 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12444 ispSensitivity =
12445 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12446 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12447 }
12448 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12449 ispSensitivity)) {
12450 rc = BAD_VALUE;
12451 }
12452 }
12453#endif
12454
Thierry Strudel3d639192016-09-09 11:52:26 -070012455 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12456 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12457 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12458 rc = BAD_VALUE;
12459 }
12460 }
12461
12462 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12463 uint8_t fwk_facedetectMode =
12464 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12465
12466 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12467 fwk_facedetectMode);
12468
12469 if (NAME_NOT_FOUND != val) {
12470 uint8_t facedetectMode = (uint8_t)val;
12471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12472 facedetectMode)) {
12473 rc = BAD_VALUE;
12474 }
12475 }
12476 }
12477
Thierry Strudel54dc9782017-02-15 12:12:10 -080012478 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012479 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012480 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012481 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12482 histogramMode)) {
12483 rc = BAD_VALUE;
12484 }
12485 }
12486
12487 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12488 uint8_t sharpnessMapMode =
12489 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12491 sharpnessMapMode)) {
12492 rc = BAD_VALUE;
12493 }
12494 }
12495
12496 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12497 uint8_t tonemapMode =
12498 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12499 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12500 rc = BAD_VALUE;
12501 }
12502 }
12503 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12504 /*All tonemap channels will have the same number of points*/
12505 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12506 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12507 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12508 cam_rgb_tonemap_curves tonemapCurves;
12509 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12510 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12511 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12512 tonemapCurves.tonemap_points_cnt,
12513 CAM_MAX_TONEMAP_CURVE_SIZE);
12514 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12515 }
12516
12517 /* ch0 = G*/
12518 size_t point = 0;
12519 cam_tonemap_curve_t tonemapCurveGreen;
12520 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12521 for (size_t j = 0; j < 2; j++) {
12522 tonemapCurveGreen.tonemap_points[i][j] =
12523 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12524 point++;
12525 }
12526 }
12527 tonemapCurves.curves[0] = tonemapCurveGreen;
12528
12529 /* ch 1 = B */
12530 point = 0;
12531 cam_tonemap_curve_t tonemapCurveBlue;
12532 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12533 for (size_t j = 0; j < 2; j++) {
12534 tonemapCurveBlue.tonemap_points[i][j] =
12535 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12536 point++;
12537 }
12538 }
12539 tonemapCurves.curves[1] = tonemapCurveBlue;
12540
12541 /* ch 2 = R */
12542 point = 0;
12543 cam_tonemap_curve_t tonemapCurveRed;
12544 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12545 for (size_t j = 0; j < 2; j++) {
12546 tonemapCurveRed.tonemap_points[i][j] =
12547 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12548 point++;
12549 }
12550 }
12551 tonemapCurves.curves[2] = tonemapCurveRed;
12552
12553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12554 tonemapCurves)) {
12555 rc = BAD_VALUE;
12556 }
12557 }
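    /* Illustration only: the framework curve is a flat array of (Pin, Pout)
     * pairs, so tonemap_points_cnt = count / 2 and the loops above unpack it as
     * tonemap_points[i][0] = Pin_i, tonemap_points[i][1] = Pout_i. A linear
     * 3-point green curve, for example, would be supplied as:
     *
     *   float curveGreen[] = {0.0f, 0.0f, 0.5f, 0.5f, 1.0f, 1.0f};
     *   settings.update(ANDROID_TONEMAP_CURVE_GREEN, curveGreen, 6);  // 3 points
     *
     * with matching 3-point arrays for ANDROID_TONEMAP_CURVE_BLUE and
     * ANDROID_TONEMAP_CURVE_RED, since all channels must carry the same number
     * of points.
     */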
12558
12559 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12560 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12561 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12562 captureIntent)) {
12563 rc = BAD_VALUE;
12564 }
12565 }
12566
12567 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12568 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12570 blackLevelLock)) {
12571 rc = BAD_VALUE;
12572 }
12573 }
12574
12575 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12576 uint8_t lensShadingMapMode =
12577 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12578 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12579 lensShadingMapMode)) {
12580 rc = BAD_VALUE;
12581 }
12582 }
12583
12584 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12585 cam_area_t roi;
12586 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012587 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012588
12589 // Map coordinate system from active array to sensor output.
12590 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12591 roi.rect.height);
12592
12593 if (scalerCropSet) {
12594 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12595 }
12596 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12597 rc = BAD_VALUE;
12598 }
12599 }
12600
12601 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12602 cam_area_t roi;
12603 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012604 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012605
12606 // Map coordinate system from active array to sensor output.
12607 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12608 roi.rect.height);
12609
12610 if (scalerCropSet) {
12611 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12612 }
12613 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12614 rc = BAD_VALUE;
12615 }
12616 }
12617
12618 // CDS for non-HFR non-video mode
12619 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12620 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12621 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12622 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12623 LOGE("Invalid CDS mode %d!", *fwk_cds);
12624 } else {
12625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12626 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12627 rc = BAD_VALUE;
12628 }
12629 }
12630 }
12631
Thierry Strudel04e026f2016-10-10 11:27:36 -070012632 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012633 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012634 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012635 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12636 }
12637 if (m_bVideoHdrEnabled)
12638 vhdr = CAM_VIDEO_HDR_MODE_ON;
12639
Thierry Strudel54dc9782017-02-15 12:12:10 -080012640 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12641
12642 if(vhdr != curr_hdr_state)
12643 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12644
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012645 rc = setVideoHdrMode(mParameters, vhdr);
12646 if (rc != NO_ERROR) {
12647         LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012648 }
12649
12650 //IR
12651 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12652 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12653 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012654 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12655 uint8_t isIRon = 0;
12656
12657        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012658 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12659 LOGE("Invalid IR mode %d!", fwk_ir);
12660 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012661 if(isIRon != curr_ir_state )
12662 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12663
Thierry Strudel04e026f2016-10-10 11:27:36 -070012664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12665 CAM_INTF_META_IR_MODE, fwk_ir)) {
12666 rc = BAD_VALUE;
12667 }
12668 }
12669 }
12670
Thierry Strudel54dc9782017-02-15 12:12:10 -080012671 //Binning Correction Mode
12672 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12673 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12674 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12675 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12676 || (0 > fwk_binning_correction)) {
12677 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12678 } else {
12679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12680 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684 }
12685
Thierry Strudel269c81a2016-10-12 12:13:59 -070012686 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12687 float aec_speed;
12688 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12689 LOGD("AEC Speed :%f", aec_speed);
12690 if ( aec_speed < 0 ) {
12691            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12692 } else {
12693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12694 aec_speed)) {
12695 rc = BAD_VALUE;
12696 }
12697 }
12698 }
12699
12700 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12701 float awb_speed;
12702 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12703 LOGD("AWB Speed :%f", awb_speed);
12704 if ( awb_speed < 0 ) {
12705            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12706 } else {
12707 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12708 awb_speed)) {
12709 rc = BAD_VALUE;
12710 }
12711 }
12712 }
12713
Thierry Strudel3d639192016-09-09 11:52:26 -070012714 // TNR
12715 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12716 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12717 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012718 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012719 cam_denoise_param_t tnr;
12720 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12721 tnr.process_plates =
12722 (cam_denoise_process_type_t)frame_settings.find(
12723 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12724 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012725
12726 if(b_TnrRequested != curr_tnr_state)
12727 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12728
Thierry Strudel3d639192016-09-09 11:52:26 -070012729 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12730 rc = BAD_VALUE;
12731 }
12732 }
12733
Thierry Strudel54dc9782017-02-15 12:12:10 -080012734 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012735 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012736 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012737 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12738 *exposure_metering_mode)) {
12739 rc = BAD_VALUE;
12740 }
12741 }
12742
Thierry Strudel3d639192016-09-09 11:52:26 -070012743 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12744 int32_t fwk_testPatternMode =
12745 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12746 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12747 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12748
12749 if (NAME_NOT_FOUND != testPatternMode) {
12750 cam_test_pattern_data_t testPatternData;
12751 memset(&testPatternData, 0, sizeof(testPatternData));
12752 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12753 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12754 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12755 int32_t *fwk_testPatternData =
12756 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12757 testPatternData.r = fwk_testPatternData[0];
12758 testPatternData.b = fwk_testPatternData[3];
12759 switch (gCamCapability[mCameraId]->color_arrangement) {
12760 case CAM_FILTER_ARRANGEMENT_RGGB:
12761 case CAM_FILTER_ARRANGEMENT_GRBG:
12762 testPatternData.gr = fwk_testPatternData[1];
12763 testPatternData.gb = fwk_testPatternData[2];
12764 break;
12765 case CAM_FILTER_ARRANGEMENT_GBRG:
12766 case CAM_FILTER_ARRANGEMENT_BGGR:
12767 testPatternData.gr = fwk_testPatternData[2];
12768 testPatternData.gb = fwk_testPatternData[1];
12769 break;
12770 default:
12771 LOGE("color arrangement %d is not supported",
12772 gCamCapability[mCameraId]->color_arrangement);
12773 break;
12774 }
12775 }
12776 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12777 testPatternData)) {
12778 rc = BAD_VALUE;
12779 }
12780 } else {
12781 LOGE("Invalid framework sensor test pattern mode %d",
12782 fwk_testPatternMode);
12783 }
12784 }
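    /* Illustration only: ANDROID_SENSOR_TEST_PATTERN_DATA carries four
     * solid-color channel values, ordered [R, Geven, Godd, B] per the Android
     * metadata definition, and the switch above re-labels the two greens as
     * Gr/Gb based on the sensor's Bayer arrangement. A full-red solid pattern
     * on a 10-bit RGGB sensor might be requested as:
     *
     *   int32_t mode = ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR;
     *   int32_t solidColor[4] = {1023, 0, 0, 0};   // R, Geven, Godd, B
     *   settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &mode, 1);
     *   settings.update(ANDROID_SENSOR_TEST_PATTERN_DATA, solidColor, 4);
     */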
12785
12786 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12787 size_t count = 0;
12788 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12789 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12790 gps_coords.data.d, gps_coords.count, count);
12791 if (gps_coords.count != count) {
12792 rc = BAD_VALUE;
12793 }
12794 }
12795
12796 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12797 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12798 size_t count = 0;
12799 const char *gps_methods_src = (const char *)
12800 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12801 memset(gps_methods, '\0', sizeof(gps_methods));
12802 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12803 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12804 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12805 if (GPS_PROCESSING_METHOD_SIZE != count) {
12806 rc = BAD_VALUE;
12807 }
12808 }
12809
12810 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12811 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12812 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12813 gps_timestamp)) {
12814 rc = BAD_VALUE;
12815 }
12816 }
12817
12818 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12819 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12820 cam_rotation_info_t rotation_info;
12821 if (orientation == 0) {
12822 rotation_info.rotation = ROTATE_0;
12823 } else if (orientation == 90) {
12824 rotation_info.rotation = ROTATE_90;
12825 } else if (orientation == 180) {
12826 rotation_info.rotation = ROTATE_180;
12827 } else if (orientation == 270) {
12828 rotation_info.rotation = ROTATE_270;
12829 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012830 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012831 rotation_info.streamId = snapshotStreamId;
12832 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12834 rc = BAD_VALUE;
12835 }
12836 }
12837
12838 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12839 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12841 rc = BAD_VALUE;
12842 }
12843 }
12844
12845 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12846 uint32_t thumb_quality = (uint32_t)
12847 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12849 thumb_quality)) {
12850 rc = BAD_VALUE;
12851 }
12852 }
12853
12854 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12855 cam_dimension_t dim;
12856 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12857 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12858 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12859 rc = BAD_VALUE;
12860 }
12861 }
12862
12863 // Internal metadata
12864 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12865 size_t count = 0;
12866 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12867 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12868 privatedata.data.i32, privatedata.count, count);
12869 if (privatedata.count != count) {
12870 rc = BAD_VALUE;
12871 }
12872 }
12873
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012874 // ISO/Exposure Priority
12875 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12876 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12877 cam_priority_mode_t mode =
12878 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12879 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12880 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12881 use_iso_exp_pty.previewOnly = FALSE;
12882 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12883 use_iso_exp_pty.value = *ptr;
12884
12885 if(CAM_ISO_PRIORITY == mode) {
12886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12887 use_iso_exp_pty)) {
12888 rc = BAD_VALUE;
12889 }
12890 }
12891 else {
12892 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12893 use_iso_exp_pty)) {
12894 rc = BAD_VALUE;
12895 }
12896 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012897
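// Manual ISO/exposure priority capture runs with ZSL enabled; when no priority mode is
// requested, the else branch below explicitly turns ZSL off again.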
12898 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12899 rc = BAD_VALUE;
12900 }
12901 }
12902 } else {
12903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12904 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012905 }
12906 }
12907
12908 // Saturation
12909 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12910 int32_t* use_saturation =
12911 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12912 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12913 rc = BAD_VALUE;
12914 }
12915 }
12916
Thierry Strudel3d639192016-09-09 11:52:26 -070012917 // EV step
12918 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12919 gCamCapability[mCameraId]->exp_compensation_step)) {
12920 rc = BAD_VALUE;
12921 }
12922
12923 // CDS info
12924 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12925 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12926 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12927
12928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12929 CAM_INTF_META_CDS_DATA, *cdsData)) {
12930 rc = BAD_VALUE;
12931 }
12932 }
12933
Shuzhen Wang19463d72016-03-08 11:09:52 -080012934 // Hybrid AE
12935 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12936 uint8_t *hybrid_ae = (uint8_t *)
12937 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12938
12939 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12940 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12941 rc = BAD_VALUE;
12942 }
12943 }
12944
Shuzhen Wang14415f52016-11-16 18:26:18 -080012945 // Histogram
12946 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12947 uint8_t histogramMode =
12948 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12949 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12950 histogramMode)) {
12951 rc = BAD_VALUE;
12952 }
12953 }
12954
12955 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12956 int32_t histogramBins =
12957 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12959 histogramBins)) {
12960 rc = BAD_VALUE;
12961 }
12962 }
12963
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012964 // Tracking AF
12965 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12966 uint8_t trackingAfTrigger =
12967 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12969 trackingAfTrigger)) {
12970 rc = BAD_VALUE;
12971 }
12972 }
12973
Thierry Strudel3d639192016-09-09 11:52:26 -070012974 return rc;
12975}
12976
12977/*===========================================================================
12978 * FUNCTION : captureResultCb
12979 *
12980 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12981 *
12982 * PARAMETERS :
12983 * @frame : frame information from mm-camera-interface
12984 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12985 * @userdata: userdata
12986 *
12987 * RETURN : NONE
12988 *==========================================================================*/
12989void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12990 camera3_stream_buffer_t *buffer,
12991 uint32_t frame_number, bool isInputBuffer, void *userdata)
12992{
12993 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12994 if (hw == NULL) {
12995 LOGE("Invalid hw %p", hw);
12996 return;
12997 }
12998
12999 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13000 return;
13001}
13002
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013003/*===========================================================================
13004 * FUNCTION : setBufferErrorStatus
13005 *
13006 * DESCRIPTION: Callback handler for channels to report any buffer errors
13007 *
13008 * PARAMETERS :
13009 * @ch : Channel on which buffer error is reported from
13010 * @frame_number : frame number on which buffer error is reported on
13011 * @buffer_status : buffer error status
13012 * @userdata: userdata
13013 *
13014 * RETURN : NONE
13015 *==========================================================================*/
13016void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13017 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13018{
13019 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13020 if (hw == NULL) {
13021 LOGE("Invalid hw %p", hw);
13022 return;
13023 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013024
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013025 hw->setBufferErrorStatus(ch, frame_number, err);
13026 return;
13027}
13028
13029void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13030 uint32_t frameNumber, camera3_buffer_status_t err)
13031{
13032 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13033 pthread_mutex_lock(&mMutex);
13034
13035 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13036 if (req.frame_number != frameNumber)
13037 continue;
13038 for (auto& k : req.mPendingBufferList) {
13039 if(k.stream->priv == ch) {
13040 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13041 }
13042 }
13043 }
13044
13045 pthread_mutex_unlock(&mMutex);
13046 return;
13047}
Thierry Strudel3d639192016-09-09 11:52:26 -070013048/*===========================================================================
13049 * FUNCTION : initialize
13050 *
13051 * DESCRIPTION: Pass framework callback pointers to HAL
13052 *
13053 * PARAMETERS :
13054 * @device : camera3 device structure
13055 * @callback_ops : callback function pointers provided by the framework
13056 * RETURN : Success : 0
13057 * Failure: -ENODEV
13058 *==========================================================================*/
13059
13060int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13061 const camera3_callback_ops_t *callback_ops)
13062{
13063 LOGD("E");
13064 QCamera3HardwareInterface *hw =
13065 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13066 if (!hw) {
13067 LOGE("NULL camera device");
13068 return -ENODEV;
13069 }
13070
13071 int rc = hw->initialize(callback_ops);
13072 LOGD("X");
13073 return rc;
13074}
13075
13076/*===========================================================================
13077 * FUNCTION : configure_streams
13078 *
13079 * DESCRIPTION: Reset the processing pipeline and set up new input and output
13080 *              streams for the camera device
13081 * PARAMETERS :
13082 * @device : camera3 device structure
13083 * @stream_list : stream configuration requested by the framework
13084 * RETURN : Success: 0
13085 * Failure: -EINVAL (if stream configuration is invalid)
13086 * -ENODEV (fatal error)
13087 *==========================================================================*/
13088
13089int QCamera3HardwareInterface::configure_streams(
13090 const struct camera3_device *device,
13091 camera3_stream_configuration_t *stream_list)
13092{
13093 LOGD("E");
13094 QCamera3HardwareInterface *hw =
13095 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13096 if (!hw) {
13097 LOGE("NULL camera device");
13098 return -ENODEV;
13099 }
13100 int rc = hw->configureStreams(stream_list);
13101 LOGD("X");
13102 return rc;
13103}
13104
13105/*===========================================================================
13106 * FUNCTION : construct_default_request_settings
13107 *
13108 * DESCRIPTION: Configure a settings buffer to meet the required use case
13109 *
13110 * PARAMETERS :
13111 * @device : camera3 device structure
13112 * @type : requested template type (CAMERA3_TEMPLATE_*)
13113 * RETURN : Success: Return valid metadata
13114 * Failure: Return NULL
13115 *==========================================================================*/
13116const camera_metadata_t* QCamera3HardwareInterface::
13117 construct_default_request_settings(const struct camera3_device *device,
13118 int type)
13119{
13120
13121 LOGD("E");
13122 camera_metadata_t* fwk_metadata = NULL;
13123 QCamera3HardwareInterface *hw =
13124 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13125 if (!hw) {
13126 LOGE("NULL camera device");
13127 return NULL;
13128 }
13129
13130 fwk_metadata = hw->translateCapabilityToMetadata(type);
13131
13132 LOGD("X");
13133 return fwk_metadata;
13134}
13135
13136/*===========================================================================
13137 * FUNCTION : process_capture_request
13138 *
13139 * DESCRIPTION: Submit a new capture request to the HAL for processing
13140 *
13141 * PARAMETERS :
13142 * @device : camera3 device structure
13143 * @request : capture request to process
13144 * RETURN : 0 on success; negative error code on failure
13145 *==========================================================================*/
13146int QCamera3HardwareInterface::process_capture_request(
13147 const struct camera3_device *device,
13148 camera3_capture_request_t *request)
13149{
13150 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013151 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013152 QCamera3HardwareInterface *hw =
13153 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13154 if (!hw) {
13155 LOGE("NULL camera device");
13156 return -EINVAL;
13157 }
13158
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013159 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013160 LOGD("X");
13161 return rc;
13162}
13163
13164/*===========================================================================
13165 * FUNCTION : dump
13166 *
13167 * DESCRIPTION: Dump HAL debugging state for this camera to the given file descriptor
13168 *
13169 * PARAMETERS :
13170 * @device : camera3 device structure
13171 * @fd : file descriptor to write the dump into
13172 * RETURN : None
13173 *==========================================================================*/
13174
13175void QCamera3HardwareInterface::dump(
13176 const struct camera3_device *device, int fd)
13177{
13178 /* Log level property is read when "adb shell dumpsys media.camera" is
13179 called so that the log level can be controlled without restarting
13180 the media server */
13181 getLogLevel();
13182
13183 LOGD("E");
13184 QCamera3HardwareInterface *hw =
13185 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13186 if (!hw) {
13187 LOGE("NULL camera device");
13188 return;
13189 }
13190
13191 hw->dump(fd);
13192 LOGD("X");
13193 return;
13194}
13195
13196/*===========================================================================
13197 * FUNCTION : flush
13198 *
13199 * DESCRIPTION: Flush all in-flight captures and return the device to an idle
13200 *              state as quickly as possible
13201 * PARAMETERS :
13202 * @device : camera3 device structure
13203 *
13204 * RETURN : 0 on success; -EINVAL for an invalid device; -ENODEV on fatal error
13205 *==========================================================================*/
13206
13207int QCamera3HardwareInterface::flush(
13208 const struct camera3_device *device)
13209{
13210 int rc;
13211 LOGD("E");
13212 QCamera3HardwareInterface *hw =
13213 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13214 if (!hw) {
13215 LOGE("NULL camera device");
13216 return -EINVAL;
13217 }
13218
13219 pthread_mutex_lock(&hw->mMutex);
13220 // Validate current state
13221 switch (hw->mState) {
13222 case STARTED:
13223 /* valid state */
13224 break;
13225
13226 case ERROR:
13227 pthread_mutex_unlock(&hw->mMutex);
13228 hw->handleCameraDeviceError();
13229 return -ENODEV;
13230
13231 default:
13232 LOGI("Flush returned during state %d", hw->mState);
13233 pthread_mutex_unlock(&hw->mMutex);
13234 return 0;
13235 }
13236 pthread_mutex_unlock(&hw->mMutex);
13237
13238 rc = hw->flush(true /* restart channels */ );
13239 LOGD("X");
13240 return rc;
13241}
13242
13243/*===========================================================================
13244 * FUNCTION : close_camera_device
13245 *
13246 * DESCRIPTION: Close the camera device and release the HAL instance
13247 *
13248 * PARAMETERS :
13249 * @device : hw_device_t handle of the camera to close
13250 *
13251 * RETURN : NO_ERROR on success; BAD_VALUE if the device handle is invalid
13252 *==========================================================================*/
13253int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13254{
13255 int ret = NO_ERROR;
13256 QCamera3HardwareInterface *hw =
13257 reinterpret_cast<QCamera3HardwareInterface *>(
13258 reinterpret_cast<camera3_device_t *>(device)->priv);
13259 if (!hw) {
13260 LOGE("NULL camera device");
13261 return BAD_VALUE;
13262 }
13263
13264 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13265 delete hw;
13266 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013267 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013268 return ret;
13269}
13270
13271/*===========================================================================
13272 * FUNCTION : getWaveletDenoiseProcessPlate
13273 *
13274 * DESCRIPTION: query wavelet denoise process plate
13275 *
13276 * PARAMETERS : None
13277 *
13278 * RETURN : WNR process plate value
13279 *==========================================================================*/
13280cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13281{
13282 char prop[PROPERTY_VALUE_MAX];
13283 memset(prop, 0, sizeof(prop));
13284 property_get("persist.denoise.process.plates", prop, "0");
13285 int processPlate = atoi(prop);
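// persist.denoise.process.plates selects the WNR plate: 0 = YCbCr plane, 1 = CbCr only,
// 2 = streamlined YCbCr, 3 = streamlined CbCr; anything else falls back to streamlined YCbCr.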
13286 switch(processPlate) {
13287 case 0:
13288 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13289 case 1:
13290 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13291 case 2:
13292 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13293 case 3:
13294 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13295 default:
13296 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13297 }
13298}
13299
13300
13301/*===========================================================================
13302 * FUNCTION : getTemporalDenoiseProcessPlate
13303 *
13304 * DESCRIPTION: query temporal denoise process plate
13305 *
13306 * PARAMETERS : None
13307 *
13308 * RETURN : TNR process plate value
13309 *==========================================================================*/
13310cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13311{
13312 char prop[PROPERTY_VALUE_MAX];
13313 memset(prop, 0, sizeof(prop));
13314 property_get("persist.tnr.process.plates", prop, "0");
13315 int processPlate = atoi(prop);
13316 switch(processPlate) {
13317 case 0:
13318 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13319 case 1:
13320 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13321 case 2:
13322 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13323 case 3:
13324 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13325 default:
13326 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13327 }
13328}
13329
13330
13331/*===========================================================================
13332 * FUNCTION : extractSceneMode
13333 *
13334 * DESCRIPTION: Extract scene mode from frameworks set metadata
13335 *
13336 * PARAMETERS :
13337 * @frame_settings: CameraMetadata reference
13338 * @metaMode: ANDROID_CONTROL_MODE
13339 * @hal_metadata: hal metadata structure
13340 *
13341 * RETURN : NO_ERROR on success, error code otherwise
13342 *==========================================================================*/
13343int32_t QCamera3HardwareInterface::extractSceneMode(
13344 const CameraMetadata &frame_settings, uint8_t metaMode,
13345 metadata_buffer_t *hal_metadata)
13346{
13347 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013348 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13349
13350 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13351 LOGD("Ignoring control mode OFF_KEEP_STATE");
13352 return NO_ERROR;
13353 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013354
13355 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13356 camera_metadata_ro_entry entry =
13357 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13358 if (0 == entry.count)
13359 return rc;
13360
13361 uint8_t fwk_sceneMode = entry.data.u8[0];
13362
13363 int val = lookupHalName(SCENE_MODES_MAP,
13364 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13365 fwk_sceneMode);
13366 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013367 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013368 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013369 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013370 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013371
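    // Toggle sensor HDR whenever the HDR scene mode is requested or was previously enabled,
    // so that leaving the HDR scene mode also turns sensor HDR back off.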
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013372 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13373 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13374 }
13375
13376 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13377 if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013378 cam_hdr_param_t hdr_params;
13379 hdr_params.hdr_enable = 1;
13380 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13381 hdr_params.hdr_need_1x = false;
13382 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13383 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13384 rc = BAD_VALUE;
13385 }
13386 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013387
Thierry Strudel3d639192016-09-09 11:52:26 -070013388 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13389 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13390 rc = BAD_VALUE;
13391 }
13392 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013393
13394 if (mForceHdrSnapshot) {
13395 cam_hdr_param_t hdr_params;
13396 hdr_params.hdr_enable = 1;
13397 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13398 hdr_params.hdr_need_1x = false;
13399 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13400 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13401 rc = BAD_VALUE;
13402 }
13403 }
13404
Thierry Strudel3d639192016-09-09 11:52:26 -070013405 return rc;
13406}
13407
13408/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013409 * FUNCTION : setVideoHdrMode
13410 *
13411 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13412 *
13413 * PARAMETERS :
13414 * @hal_metadata: hal metadata structure
13415 * @vhdr: requested video HDR mode
13416 *
13417 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13418 *==========================================================================*/
13419int32_t QCamera3HardwareInterface::setVideoHdrMode(
13420 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13421{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013422 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13423 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13424 }
13425
13426 LOGE("Invalid Video HDR mode %d!", vhdr);
13427 return BAD_VALUE;
13428}
13429
13430/*===========================================================================
13431 * FUNCTION : setSensorHDR
13432 *
13433 * DESCRIPTION: Enable/disable sensor HDR.
13434 *
13435 * PARAMETERS :
13436 * @hal_metadata: hal metadata structure
13437 * @enable: whether to enable or disable sensor HDR
13438 * @isVideoHdrEnable: true when called for video HDR; skips updating m_bSensorHDREnabled
13439 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13440 *==========================================================================*/
13441int32_t QCamera3HardwareInterface::setSensorHDR(
13442 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13443{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013444 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013445 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13446
13447 if (enable) {
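        // persist.camera.sensor.hdr selects the sensor HDR flavor (a cam_sensor_hdr_type_t
        // value); the capability checks below reject any type the sensor does not support.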
13448 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13449 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13450 #ifdef _LE_CAMERA_
13451 //Default to staggered HDR for IOT
13452 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13453 #else
13454 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13455 #endif
13456 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13457 }
13458
13459 bool isSupported = false;
13460 switch (sensor_hdr) {
13461 case CAM_SENSOR_HDR_IN_SENSOR:
13462 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13463 CAM_QCOM_FEATURE_SENSOR_HDR) {
13464 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013465 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013466 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013467 break;
13468 case CAM_SENSOR_HDR_ZIGZAG:
13469 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13470 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13471 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013472 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013473 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013474 break;
13475 case CAM_SENSOR_HDR_STAGGERED:
13476 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13477 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13478 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013479 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013480 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013481 break;
13482 case CAM_SENSOR_HDR_OFF:
13483 isSupported = true;
13484 LOGD("Turning off sensor HDR");
13485 break;
13486 default:
13487 LOGE("HDR mode %d not supported", sensor_hdr);
13488 rc = BAD_VALUE;
13489 break;
13490 }
13491
13492 if(isSupported) {
13493 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13494 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13495 rc = BAD_VALUE;
13496 } else {
13497 if(!isVideoHdrEnable)
13498 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013499 }
13500 }
13501 return rc;
13502}
13503
13504/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013505 * FUNCTION : needRotationReprocess
13506 *
13507 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13508 *
13509 * PARAMETERS : none
13510 *
13511 * RETURN : true: needed
13512 * false: no need
13513 *==========================================================================*/
13514bool QCamera3HardwareInterface::needRotationReprocess()
13515{
13516 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13517 // pp has the capability to process rotation, so route rotation through reprocess
13518 LOGH("need do reprocess for rotation");
13519 return true;
13520 }
13521
13522 return false;
13523}
13524
13525/*===========================================================================
13526 * FUNCTION : needReprocess
13527 *
13528 * DESCRIPTION: if reprocess is needed
13529 *
13530 * PARAMETERS : @postprocess_mask: feature mask of post-processing already applied
13531 *
13532 * RETURN : true: needed
13533 * false: no need
13534 *==========================================================================*/
13535bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13536{
13537 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13538 // TODO: add for ZSL HDR later
13539 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13540 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13541 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13542 return true;
13543 } else {
13544 LOGH("already post processed frame");
13545 return false;
13546 }
13547 }
13548 return needRotationReprocess();
13549}
13550
13551/*===========================================================================
13552 * FUNCTION : needJpegExifRotation
13553 *
13554 * DESCRIPTION: if rotation from jpeg is needed
13555 *
13556 * PARAMETERS : none
13557 *
13558 * RETURN : true: needed
13559 * false: no need
13560 *==========================================================================*/
13561bool QCamera3HardwareInterface::needJpegExifRotation()
13562{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013563 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013564 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13565 LOGD("Need use Jpeg EXIF Rotation");
13566 return true;
13567 }
13568 return false;
13569}
13570
13571/*===========================================================================
13572 * FUNCTION : addOfflineReprocChannel
13573 *
13574 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13575 * coming from input channel
13576 *
13577 * PARAMETERS :
13578 * @config : reprocess configuration
13579 * @inputChHandle : pointer to the input (source) channel
13580 *
13581 *
13582 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13583 *==========================================================================*/
13584QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13585 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13586{
13587 int32_t rc = NO_ERROR;
13588 QCamera3ReprocessChannel *pChannel = NULL;
13589
13590 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013591 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13592 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013593 if (NULL == pChannel) {
13594 LOGE("no mem for reprocess channel");
13595 return NULL;
13596 }
13597
13598 rc = pChannel->initialize(IS_TYPE_NONE);
13599 if (rc != NO_ERROR) {
13600 LOGE("init reprocess channel failed, ret = %d", rc);
13601 delete pChannel;
13602 return NULL;
13603 }
13604
13605 // pp feature config
13606 cam_pp_feature_config_t pp_config;
13607 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13608
13609 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13610 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13611 & CAM_QCOM_FEATURE_DSDN) {
13612 //Use CPP CDS in case h/w supports it.
13613 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13614 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13615 }
13616 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13617 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13618 }
13619
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013620 if (config.hdr_param.hdr_enable) {
13621 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13622 pp_config.hdr_param = config.hdr_param;
13623 }
13624
13625 if (mForceHdrSnapshot) {
13626 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13627 pp_config.hdr_param.hdr_enable = 1;
13628 pp_config.hdr_param.hdr_need_1x = 0;
13629 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13630 }
13631
Thierry Strudel3d639192016-09-09 11:52:26 -070013632 rc = pChannel->addReprocStreamsFromSource(pp_config,
13633 config,
13634 IS_TYPE_NONE,
13635 mMetadataChannel);
13636
13637 if (rc != NO_ERROR) {
13638 delete pChannel;
13639 return NULL;
13640 }
13641 return pChannel;
13642}
13643
13644/*===========================================================================
13645 * FUNCTION : getMobicatMask
13646 *
13647 * DESCRIPTION: returns mobicat mask
13648 *
13649 * PARAMETERS : none
13650 *
13651 * RETURN : mobicat mask
13652 *
13653 *==========================================================================*/
13654uint8_t QCamera3HardwareInterface::getMobicatMask()
13655{
13656 return m_MobicatMask;
13657}
13658
13659/*===========================================================================
13660 * FUNCTION : setMobicat
13661 *
13662 * DESCRIPTION: set Mobicat on/off.
13663 *
13664 * PARAMETERS :
13665 * None
13666 *
13667 * RETURN : int32_t type of status
13668 * NO_ERROR -- success
13669 * non-zero failure code
13670 *==========================================================================*/
13671int32_t QCamera3HardwareInterface::setMobicat()
13672{
Thierry Strudel3d639192016-09-09 11:52:26 -070013673 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013674
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013675 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013676 tune_cmd_t tune_cmd;
13677 tune_cmd.type = SET_RELOAD_CHROMATIX;
13678 tune_cmd.module = MODULE_ALL;
13679 tune_cmd.value = TRUE;
13680 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13681 CAM_INTF_PARM_SET_VFE_COMMAND,
13682 tune_cmd);
13683
13684 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13685 CAM_INTF_PARM_SET_PP_COMMAND,
13686 tune_cmd);
13687 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013688
13689 return ret;
13690}
13691
13692/*===========================================================================
13693* FUNCTION : getLogLevel
13694*
13695* DESCRIPTION: Reads the log level property into a variable
13696*
13697* PARAMETERS :
13698* None
13699*
13700* RETURN :
13701* None
13702*==========================================================================*/
13703void QCamera3HardwareInterface::getLogLevel()
13704{
13705 char prop[PROPERTY_VALUE_MAX];
13706 uint32_t globalLogLevel = 0;
13707
13708 property_get("persist.camera.hal.debug", prop, "0");
13709 int val = atoi(prop);
13710 if (0 <= val) {
13711 gCamHal3LogLevel = (uint32_t)val;
13712 }
13713
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013714 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013715 gKpiDebugLevel = atoi(prop);
13716
13717 property_get("persist.camera.global.debug", prop, "0");
13718 val = atoi(prop);
13719 if (0 <= val) {
13720 globalLogLevel = (uint32_t)val;
13721 }
13722
13723 /* Highest log level among hal.logs and global.logs is selected */
13724 if (gCamHal3LogLevel < globalLogLevel)
13725 gCamHal3LogLevel = globalLogLevel;
13726
13727 return;
13728}
13729
13730/*===========================================================================
13731 * FUNCTION : validateStreamRotations
13732 *
13733 * DESCRIPTION: Check if the rotations requested are supported
13734 *
13735 * PARAMETERS :
13736 * @stream_list : streams to be configured
13737 *
13738 * RETURN : NO_ERROR on success
13739 * -EINVAL on failure
13740 *
13741 *==========================================================================*/
13742int QCamera3HardwareInterface::validateStreamRotations(
13743 camera3_stream_configuration_t *streamList)
13744{
13745 int rc = NO_ERROR;
13746
13747 /*
13748 * Loop through all streams requested in configuration
13749 * Check if unsupported rotations have been requested on any of them
13750 */
13751 for (size_t j = 0; j < streamList->num_streams; j++){
13752 camera3_stream_t *newStream = streamList->streams[j];
13753
Emilian Peev35ceeed2017-06-29 11:58:56 -070013754 switch(newStream->rotation) {
13755 case CAMERA3_STREAM_ROTATION_0:
13756 case CAMERA3_STREAM_ROTATION_90:
13757 case CAMERA3_STREAM_ROTATION_180:
13758 case CAMERA3_STREAM_ROTATION_270:
13759 //Expected values
13760 break;
13761 default:
13762 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13763 "type:%d and stream format:%d", __func__,
13764 newStream->rotation, newStream->stream_type,
13765 newStream->format);
13766 return -EINVAL;
13767 }
13768
Thierry Strudel3d639192016-09-09 11:52:26 -070013769 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13770 bool isImplDef = (newStream->format ==
13771 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13772 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13773 isImplDef);
13774
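// Rotation is only allowed on implementation-defined output streams; a rotated ZSL
// (bidirectional implementation-defined) stream is rejected as well.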
13775 if (isRotated && (!isImplDef || isZsl)) {
13776 LOGE("Error: Unsupported rotation of %d requested for stream"
13777 "type:%d and stream format:%d",
13778 newStream->rotation, newStream->stream_type,
13779 newStream->format);
13780 rc = -EINVAL;
13781 break;
13782 }
13783 }
13784
13785 return rc;
13786}
13787
13788/*===========================================================================
13789* FUNCTION : getFlashInfo
13790*
13791* DESCRIPTION: Retrieve information about whether the device has a flash.
13792*
13793* PARAMETERS :
13794* @cameraId : Camera id to query
13795* @hasFlash : Boolean indicating whether there is a flash device
13796* associated with given camera
13797* @flashNode : If a flash device exists, this will be its device node.
13798*
13799* RETURN :
13800* None
13801*==========================================================================*/
13802void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13803 bool& hasFlash,
13804 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13805{
13806 cam_capability_t* camCapability = gCamCapability[cameraId];
13807 if (NULL == camCapability) {
13808 hasFlash = false;
13809 flashNode[0] = '\0';
13810 } else {
13811 hasFlash = camCapability->flash_available;
13812 strlcpy(flashNode,
13813 (char*)camCapability->flash_dev_name,
13814 QCAMERA_MAX_FILEPATH_LENGTH);
13815 }
13816}
13817
13818/*===========================================================================
13819* FUNCTION : getEepromVersionInfo
13820*
13821* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13822*
13823* PARAMETERS : None
13824*
13825* RETURN : string describing EEPROM version
13826* "\0" if no such info available
13827*==========================================================================*/
13828const char *QCamera3HardwareInterface::getEepromVersionInfo()
13829{
13830 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13831}
13832
13833/*===========================================================================
13834* FUNCTION : getLdafCalib
13835*
13836* DESCRIPTION: Retrieve Laser AF calibration data
13837*
13838* PARAMETERS : None
13839*
13840* RETURN : Two uint32_t describing laser AF calibration data
13841* NULL if none is available.
13842*==========================================================================*/
13843const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13844{
13845 if (mLdafCalibExist) {
13846 return &mLdafCalib[0];
13847 } else {
13848 return NULL;
13849 }
13850}
13851
13852/*===========================================================================
13853 * FUNCTION : dynamicUpdateMetaStreamInfo
13854 *
13855 * DESCRIPTION: This function:
13856 * (1) stops all the channels
13857 * (2) returns error on pending requests and buffers
13858 * (3) sends metastream_info in setparams
13859 * (4) starts all channels
13860 * This is useful when sensor has to be restarted to apply any
13861 * settings such as frame rate from a different sensor mode
13862 *
13863 * PARAMETERS : None
13864 *
13865 * RETURN : NO_ERROR on success
13866 * Error codes on failure
13867 *
13868 *==========================================================================*/
13869int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13870{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013871 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013872 int rc = NO_ERROR;
13873
13874 LOGD("E");
13875
13876 rc = stopAllChannels();
13877 if (rc < 0) {
13878 LOGE("stopAllChannels failed");
13879 return rc;
13880 }
13881
13882 rc = notifyErrorForPendingRequests();
13883 if (rc < 0) {
13884 LOGE("notifyErrorForPendingRequests failed");
13885 return rc;
13886 }
13887
13888 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13889 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13890 "Format:%d",
13891 mStreamConfigInfo.type[i],
13892 mStreamConfigInfo.stream_sizes[i].width,
13893 mStreamConfigInfo.stream_sizes[i].height,
13894 mStreamConfigInfo.postprocess_mask[i],
13895 mStreamConfigInfo.format[i]);
13896 }
13897
13898 /* Send meta stream info once again so that ISP can start */
13899 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13900 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13901 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13902 mParameters);
13903 if (rc < 0) {
13904 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13905 }
13906
13907 rc = startAllChannels();
13908 if (rc < 0) {
13909 LOGE("startAllChannels failed");
13910 return rc;
13911 }
13912
13913 LOGD("X");
13914 return rc;
13915}
13916
13917/*===========================================================================
13918 * FUNCTION : stopAllChannels
13919 *
13920 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13921 *
13922 * PARAMETERS : None
13923 *
13924 * RETURN : NO_ERROR on success
13925 * Error codes on failure
13926 *
13927 *==========================================================================*/
13928int32_t QCamera3HardwareInterface::stopAllChannels()
13929{
13930 int32_t rc = NO_ERROR;
13931
13932 LOGD("Stopping all channels");
13933 // Stop the Streams/Channels
13934 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13935 it != mStreamInfo.end(); it++) {
13936 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13937 if (channel) {
13938 channel->stop();
13939 }
13940 (*it)->status = INVALID;
13941 }
13942
13943 if (mSupportChannel) {
13944 mSupportChannel->stop();
13945 }
13946 if (mAnalysisChannel) {
13947 mAnalysisChannel->stop();
13948 }
13949 if (mRawDumpChannel) {
13950 mRawDumpChannel->stop();
13951 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013952 if (mHdrPlusRawSrcChannel) {
13953 mHdrPlusRawSrcChannel->stop();
13954 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013955 if (mMetadataChannel) {
13956 /* If content of mStreamInfo is not 0, there is metadata stream */
13957 mMetadataChannel->stop();
13958 }
13959
13960 LOGD("All channels stopped");
13961 return rc;
13962}
13963
13964/*===========================================================================
13965 * FUNCTION : startAllChannels
13966 *
13967 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13968 *
13969 * PARAMETERS : None
13970 *
13971 * RETURN : NO_ERROR on success
13972 * Error codes on failure
13973 *
13974 *==========================================================================*/
13975int32_t QCamera3HardwareInterface::startAllChannels()
13976{
13977 int32_t rc = NO_ERROR;
13978
13979 LOGD("Start all channels ");
13980 // Start the Streams/Channels
13981 if (mMetadataChannel) {
13982 /* If content of mStreamInfo is not 0, there is metadata stream */
13983 rc = mMetadataChannel->start();
13984 if (rc < 0) {
13985 LOGE("META channel start failed");
13986 return rc;
13987 }
13988 }
13989 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13990 it != mStreamInfo.end(); it++) {
13991 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13992 if (channel) {
13993 rc = channel->start();
13994 if (rc < 0) {
13995 LOGE("channel start failed");
13996 return rc;
13997 }
13998 }
13999 }
14000 if (mAnalysisChannel) {
14001 mAnalysisChannel->start();
14002 }
14003 if (mSupportChannel) {
14004 rc = mSupportChannel->start();
14005 if (rc < 0) {
14006 LOGE("Support channel start failed");
14007 return rc;
14008 }
14009 }
14010 if (mRawDumpChannel) {
14011 rc = mRawDumpChannel->start();
14012 if (rc < 0) {
14013 LOGE("RAW dump channel start failed");
14014 return rc;
14015 }
14016 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014017 if (mHdrPlusRawSrcChannel) {
14018 rc = mHdrPlusRawSrcChannel->start();
14019 if (rc < 0) {
14020 LOGE("HDR+ RAW channel start failed");
14021 return rc;
14022 }
14023 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014024
14025 LOGD("All channels started");
14026 return rc;
14027}
14028
14029/*===========================================================================
14030 * FUNCTION : notifyErrorForPendingRequests
14031 *
14032 * DESCRIPTION: This function sends error for all the pending requests/buffers
14033 *
14034 * PARAMETERS : None
14035 *
14036 * RETURN : Error codes
14037 * NO_ERROR on success
14038 *
14039 *==========================================================================*/
14040int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14041{
Emilian Peev7650c122017-01-19 08:24:33 -080014042 notifyErrorFoPendingDepthData(mDepthChannel);
14043
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014044 auto pendingRequest = mPendingRequestsList.begin();
14045 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014046
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014047 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14048 // buffers (for which buffers aren't sent yet).
14049 while (pendingRequest != mPendingRequestsList.end() ||
14050 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14051 if (pendingRequest == mPendingRequestsList.end() ||
14052 pendingBuffer->frame_number < pendingRequest->frame_number) {
14053 // If metadata for this frame was sent, notify about a buffer error and return buffers
14054 // with error.
14055 for (auto &info : pendingBuffer->mPendingBufferList) {
14056 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014057 camera3_notify_msg_t notify_msg;
14058 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14059 notify_msg.type = CAMERA3_MSG_ERROR;
14060 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014061 notify_msg.message.error.error_stream = info.stream;
14062 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014063 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014064
14065 camera3_stream_buffer_t buffer = {};
14066 buffer.acquire_fence = -1;
14067 buffer.release_fence = -1;
14068 buffer.buffer = info.buffer;
14069 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14070 buffer.stream = info.stream;
14071 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014072 }
14073
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014074 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14075 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14076 pendingBuffer->frame_number > pendingRequest->frame_number) {
14077 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014078 camera3_notify_msg_t notify_msg;
14079 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14080 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014081 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14082 notify_msg.message.error.error_stream = nullptr;
14083 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014084 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014085
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014086 if (pendingRequest->input_buffer != nullptr) {
14087 camera3_capture_result result = {};
14088 result.frame_number = pendingRequest->frame_number;
14089 result.result = nullptr;
14090 result.input_buffer = pendingRequest->input_buffer;
14091 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014092 }
14093
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014094 mShutterDispatcher.clear(pendingRequest->frame_number);
14095 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14096 } else {
14097 // If both buffers and result metadata weren't sent yet, notify about a request error
14098 // and return buffers with error.
14099 for (auto &info : pendingBuffer->mPendingBufferList) {
14100 camera3_notify_msg_t notify_msg;
14101 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14102 notify_msg.type = CAMERA3_MSG_ERROR;
14103 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14104 notify_msg.message.error.error_stream = info.stream;
14105 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14106 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014107
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014108 camera3_stream_buffer_t buffer = {};
14109 buffer.acquire_fence = -1;
14110 buffer.release_fence = -1;
14111 buffer.buffer = info.buffer;
14112 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14113 buffer.stream = info.stream;
14114 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14115 }
14116
14117 if (pendingRequest->input_buffer != nullptr) {
14118 camera3_capture_result result = {};
14119 result.frame_number = pendingRequest->frame_number;
14120 result.result = nullptr;
14121 result.input_buffer = pendingRequest->input_buffer;
14122 orchestrateResult(&result);
14123 }
14124
14125 mShutterDispatcher.clear(pendingRequest->frame_number);
14126 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14127 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014128 }
14129 }
14130
14131 /* Reset pending frame Drop list and requests list */
14132 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014133 mShutterDispatcher.clear();
14134 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014135 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014136 LOGH("Cleared all the pending buffers ");
14137
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014138 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014139}
14140
14141bool QCamera3HardwareInterface::isOnEncoder(
14142 const cam_dimension_t max_viewfinder_size,
14143 uint32_t width, uint32_t height)
14144{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014145 return ((width > (uint32_t)max_viewfinder_size.width) ||
14146 (height > (uint32_t)max_viewfinder_size.height) ||
14147 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14148 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014149}
14150
14151/*===========================================================================
14152 * FUNCTION : setBundleInfo
14153 *
14154 * DESCRIPTION: Set bundle info for all streams that are bundle.
14155 *
14156 * PARAMETERS : None
14157 *
14158 * RETURN : NO_ERROR on success
14159 * Error codes on failure
14160 *==========================================================================*/
14161int32_t QCamera3HardwareInterface::setBundleInfo()
14162{
14163 int32_t rc = NO_ERROR;
14164
14165 if (mChannelHandle) {
14166 cam_bundle_config_t bundleInfo;
14167 memset(&bundleInfo, 0, sizeof(bundleInfo));
14168 rc = mCameraHandle->ops->get_bundle_info(
14169 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14170 if (rc != NO_ERROR) {
14171 LOGE("get_bundle_info failed");
14172 return rc;
14173 }
14174 if (mAnalysisChannel) {
14175 mAnalysisChannel->setBundleInfo(bundleInfo);
14176 }
14177 if (mSupportChannel) {
14178 mSupportChannel->setBundleInfo(bundleInfo);
14179 }
14180 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14181 it != mStreamInfo.end(); it++) {
14182 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14183 channel->setBundleInfo(bundleInfo);
14184 }
14185 if (mRawDumpChannel) {
14186 mRawDumpChannel->setBundleInfo(bundleInfo);
14187 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014188 if (mHdrPlusRawSrcChannel) {
14189 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14190 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014191 }
14192
14193 return rc;
14194}
14195
14196/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014197 * FUNCTION : setInstantAEC
14198 *
14199 * DESCRIPTION: Set Instant AEC related params.
14200 *
14201 * PARAMETERS :
14202 * @meta: CameraMetadata reference
14203 *
14204 * RETURN : NO_ERROR on success
14205 * Error codes on failure
14206 *==========================================================================*/
14207int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14208{
14209 int32_t rc = NO_ERROR;
14210 uint8_t val = 0;
14211 char prop[PROPERTY_VALUE_MAX];
14212
14213 // First try to configure instant AEC from framework metadata
14214 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14215 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14216 }
14217
14218 // If framework did not set this value, try to read from set prop.
14219 if (val == 0) {
14220 memset(prop, 0, sizeof(prop));
14221 property_get("persist.camera.instant.aec", prop, "0");
14222 val = (uint8_t)atoi(prop);
14223 }
14224
14225 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14226 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14227 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14228 mInstantAEC = val;
14229 mInstantAECSettledFrameNumber = 0;
14230 mInstantAecFrameIdxCount = 0;
14231 LOGH("instantAEC value set %d",val);
14232 if (mInstantAEC) {
14233 memset(prop, 0, sizeof(prop));
14234 property_get("persist.camera.ae.instant.bound", prop, "10");
14235 int32_t aec_frame_skip_cnt = atoi(prop);
14236 if (aec_frame_skip_cnt >= 0) {
14237 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14238 } else {
14239 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14240 rc = BAD_VALUE;
14241 }
14242 }
14243 } else {
14244 LOGE("Bad instant aec value set %d", val);
14245 rc = BAD_VALUE;
14246 }
14247 return rc;
14248}
14249
14250/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014251 * FUNCTION : get_num_overall_buffers
14252 *
14253 * DESCRIPTION: Estimate number of pending buffers across all requests.
14254 *
14255 * PARAMETERS : None
14256 *
14257 * RETURN : Number of overall pending buffers
14258 *
14259 *==========================================================================*/
14260uint32_t PendingBuffersMap::get_num_overall_buffers()
14261{
14262 uint32_t sum_buffers = 0;
14263 for (auto &req : mPendingBuffersInRequest) {
14264 sum_buffers += req.mPendingBufferList.size();
14265 }
14266 return sum_buffers;
14267}
14268
14269/*===========================================================================
14270 * FUNCTION : removeBuf
14271 *
14272 * DESCRIPTION: Remove a matching buffer from tracker.
14273 *
14274 * PARAMETERS : @buffer: image buffer for the callback
14275 *
14276 * RETURN : None
14277 *
14278 *==========================================================================*/
14279void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14280{
14281 bool buffer_found = false;
14282 for (auto req = mPendingBuffersInRequest.begin();
14283 req != mPendingBuffersInRequest.end(); req++) {
14284 for (auto k = req->mPendingBufferList.begin();
14285 k != req->mPendingBufferList.end(); k++ ) {
14286 if (k->buffer == buffer) {
14287 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14288 req->frame_number, buffer);
14289 k = req->mPendingBufferList.erase(k);
14290 if (req->mPendingBufferList.empty()) {
14291 // Remove this request from Map
14292 req = mPendingBuffersInRequest.erase(req);
14293 }
14294 buffer_found = true;
14295 break;
14296 }
14297 }
14298 if (buffer_found) {
14299 break;
14300 }
14301 }
14302 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14303 get_num_overall_buffers());
14304}
14305
14306/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014307 * FUNCTION : getBufErrStatus
14308 *
14309 * DESCRIPTION: get buffer error status
14310 *
14311 * PARAMETERS : @buffer: buffer handle
14312 *
14313 * RETURN : Error status
14314 *
14315 *==========================================================================*/
14316int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14317{
14318 for (auto& req : mPendingBuffersInRequest) {
14319 for (auto& k : req.mPendingBufferList) {
14320 if (k.buffer == buffer)
14321 return k.bufStatus;
14322 }
14323 }
14324 return CAMERA3_BUFFER_STATUS_OK;
14325}
14326
14327/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014328 * FUNCTION : setPAAFSupport
14329 *
14330 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14331 * feature mask according to stream type and filter
14332 * arrangement
14333 *
14334 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14335 * @stream_type: stream type
14336 * @filter_arrangement: filter arrangement
14337 *
14338 * RETURN : None
14339 *==========================================================================*/
14340void QCamera3HardwareInterface::setPAAFSupport(
14341 cam_feature_mask_t& feature_mask,
14342 cam_stream_type_t stream_type,
14343 cam_color_filter_arrangement_t filter_arrangement)
14344{
Thierry Strudel3d639192016-09-09 11:52:26 -070014345 switch (filter_arrangement) {
14346 case CAM_FILTER_ARRANGEMENT_RGGB:
14347 case CAM_FILTER_ARRANGEMENT_GRBG:
14348 case CAM_FILTER_ARRANGEMENT_GBRG:
14349 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014350 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14351 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014352 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014353 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14354 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014355 }
14356 break;
14357 case CAM_FILTER_ARRANGEMENT_Y:
14358 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14359 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14360 }
14361 break;
14362 default:
14363 break;
14364 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014365 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14366 feature_mask, stream_type, filter_arrangement);
14367
14368
Thierry Strudel3d639192016-09-09 11:52:26 -070014369}
14370
/*===========================================================================
* FUNCTION   : getSensorMountAngle
*
* DESCRIPTION: Retrieve sensor mount angle
*
* PARAMETERS : None
*
* RETURN     : sensor mount angle in uint32_t
*==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    return gCamCapability[mCameraId]->sensor_mount_angle;
}

/*===========================================================================
* FUNCTION   : getRelatedCalibrationData
*
* DESCRIPTION: Retrieve related system calibration data
*
* PARAMETERS : None
*
* RETURN     : Pointer to related system calibration data
*==========================================================================*/
const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
{
    return (const cam_related_system_calibration_data_t *)
            &(gCamCapability[mCameraId]->related_cam_calibration);
}

/*===========================================================================
 * FUNCTION   : is60HzZone
 *
 * DESCRIPTION: Whether the device is in a zone with 60Hz mains electricity
 *              frequency
 *
 * PARAMETERS : None
 *
 * RETURN     : True if in a 60Hz zone, False otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::is60HzZone()
{
    time_t t = time(NULL);
    struct tm lt;

    struct tm* r = localtime_r(&t, &lt);

    if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
        return true;
    else
        return false;
}

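// Note: the check above is a coarse heuristic based on the local UTC offset
// (tm_gmtoff): offsets at or below UTC-2 (the Americas) and at or above
// UTC+8 (much of East Asia) are treated as 60Hz regions, and 60Hz is also
// assumed when local time cannot be determined. This is presumably only used
// to bias the default anti-banding frequency, not as an exact grid lookup.
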
/*===========================================================================
 * FUNCTION   : adjustBlackLevelForCFA
 *
 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
 *              of bayer CFA (Color Filter Array).
 *
 * PARAMETERS : @input: black level pattern in the order of RGGB
 *              @output: black level pattern in the order of CFA
 *              @color_arrangement: CFA color arrangement
 *
 * RETURN     : None
 *==========================================================================*/
template<typename T>
void QCamera3HardwareInterface::adjustBlackLevelForCFA(
        T input[BLACK_LEVEL_PATTERN_CNT],
        T output[BLACK_LEVEL_PATTERN_CNT],
        cam_color_filter_arrangement_t color_arrangement)
{
    switch (color_arrangement) {
    case CAM_FILTER_ARRANGEMENT_GRBG:
        output[0] = input[1];
        output[1] = input[0];
        output[2] = input[3];
        output[3] = input[2];
        break;
    case CAM_FILTER_ARRANGEMENT_GBRG:
        output[0] = input[2];
        output[1] = input[3];
        output[2] = input[0];
        output[3] = input[1];
        break;
    case CAM_FILTER_ARRANGEMENT_BGGR:
        output[0] = input[3];
        output[1] = input[2];
        output[2] = input[1];
        output[3] = input[0];
        break;
    case CAM_FILTER_ARRANGEMENT_RGGB:
        output[0] = input[0];
        output[1] = input[1];
        output[2] = input[2];
        output[3] = input[3];
        break;
    default:
        LOGE("Invalid color arrangement to derive dynamic blacklevel");
        break;
    }
}

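// Illustrative mapping (assuming the input is ordered {R, Gr, Gb, B}, i.e.
// RGGB): for a GRBG sensor the output becomes {Gr, R, B, Gb}; each black
// level value is moved to the position its color occupies in the CFA.
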
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
        CameraMetadata &resultMetadata,
        std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    }
}

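// Note: this helper only copies the request-scoped JPEG/GPS/intent settings
// back into the result metadata. The rest of the result metadata describes
// the ZSL frame that HDR+ selected, which would otherwise carry the wrong
// (earlier) values for these app-visible tags.
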
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
        const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;

    // Check noise reduction mode is high quality.
    if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
            metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
                metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
        return false;
    }

    // Check edge mode is high quality.
    if (!metadata.exists(ANDROID_EDGE_MODE) ||
            metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    if (request.num_output_buffers != 1 ||
            request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
            ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
        }
        return false;
    }

    // Get a YUV buffer from pic channel.
    QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
    auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
    status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
    if (res != OK) {
        ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return false;
    }

    pbcamera::StreamBuffer buffer;
    buffer.streamId = kPbYuvOutputStreamId;
    buffer.dmaBufFd = yuvBuffer->fd;
    buffer.data = yuvBuffer->buffer;
    buffer.dataSize = yuvBuffer->frame_len;

    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    pbRequest.outputBuffers.push_back(buffer);

    // Submit an HDR+ capture request to HDR+ service.
    res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        return false;
    }

    hdrPlusRequest->yuvBuffer = yuvBuffer;
    hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);

    return true;
}

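// A request is treated as an HDR+ candidate only when the app asks for
// HIGH_QUALITY noise reduction and edge enhancement and the request has a
// single JPEG (BLOB) output. On success the staged YUV buffer and the
// framework output buffer are parked in *hdrPlusRequest; the caller is
// expected to track it in mHdrPlusPendingRequests, and the JPEG is produced
// later in onCaptureResult() when Easel returns the processed YUV image.
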
status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
{
    if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
        return OK;
    }

    status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
    if (res != OK) {
        ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    gHdrPlusClientOpening = true;

    return OK;
}

status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
{
    status_t res;

    if (mHdrPlusModeEnabled) {
        return OK;
    }

    // Check if gHdrPlusClient is opened or being opened.
    if (gHdrPlusClient == nullptr) {
        if (gHdrPlusClientOpening) {
            // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
            return OK;
        }

        res = openHdrPlusClientAsyncLocked();
        if (res != OK) {
            ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }

        // When opening HDR+ client completes, HDR+ mode will be enabled.
        return OK;
    }

    // Configure streams for HDR+.
    res = configureHdrPlusStreamsLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
    res = gHdrPlusClient->setZslHdrPlusMode(true);
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    mHdrPlusModeEnabled = true;
    ALOGD("%s: HDR+ mode enabled", __FUNCTION__);

    return OK;
}

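// State machine summary: if no client exists yet, an asynchronous open is
// kicked off and enabling is deferred to onOpened(); if an open is already
// in flight, this call is a no-op; once a client is available, the HDR+
// streams are configured and ZSL HDR+ capture is turned on.
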
void QCamera3HardwareInterface::disableHdrPlusModeLocked()
{
    // Disable HDR+ mode.
    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
        status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
        if (res != OK) {
            ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        // Close HDR+ client so Easel can enter low power mode.
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
    }

    mHdrPlusModeEnabled = false;
    gHdrPlusClientOpening = false;
    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
}

status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
{
    pbcamera::InputConfiguration inputConfig;
    std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
    status_t res = OK;

    // Configure HDR+ client streams.
    // Get input config.
    if (mHdrPlusRawSrcChannel) {
        // HDR+ input buffers will be provided by HAL.
        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        inputConfig.isSensorInput = false;
    } else {
        // Sensor MIPI will send data to Easel.
        inputConfig.isSensorInput = true;
        inputConfig.sensorMode.cameraId = mCameraId;
        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
        if (mSensorModeInfo.num_raw_bits != 10) {
            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
                    mSensorModeInfo.num_raw_bits);
            return BAD_VALUE;
        }

        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
    }

    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);

            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

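// Note: input to Easel comes either from RAW10 buffers the HAL itself streams
// (mHdrPlusRawSrcChannel) or directly from the sensor over MIPI; in the
// latter case only 10-bit raw sensor modes are accepted. The only output
// currently wired up is the YUV stream backed by mPictureChannel, which is
// what later feeds JPEG encoding.
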
void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
{
    ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode.", __FUNCTION__);
    }
}

void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}

void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find a pending HDR+ request for request id %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Find the timestamp.
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(updatedResultMetadata,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res != OK) {
            ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                    __FUNCTION__, result->requestId, strerror(-res), res);
        } else {
            mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

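// Rough flow of a successful HDR+ capture: Easel hands back a processed YUV
// buffer plus metadata for the chosen ZSL frame; the HAL patches the
// request-specific JPEG settings into that metadata, queues the YUV buffer on
// the pic channel for JPEG encoding, dispatches the shutter using the sensor
// timestamp, and finally reports the result metadata through the normal
// pending-result path.
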
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest != mHdrPlusPendingRequests.end()) {
            // Return the buffer to pic channel.
            QCamera3PicChannel *picChannel = (QCamera3PicChannel*)
                    pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        }
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

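// On a failed HDR+ result the HAL unwinds the request manually: the staged
// YUV buffer goes back to the pic channel, every framework buffer of that
// frame is returned with CAMERA3_BUFFER_STATUS_ERROR together with a
// per-buffer error notify, and the frame is dropped from the pending request
// list so the normal result path no longer waits for it.
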
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

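// ShutterDispatcher keeps regular and reprocess shutters in separate
// frame-number-ordered maps so that each set of notifications goes out
// strictly in frame order: marking frame N ready only releases it (and any
// ready frames queued behind it) once every earlier expected shutter in the
// same map has been sent.
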
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result containing this buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

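// OutputBufferDispatcher mirrors ShutterDispatcher for output buffers: each
// configured stream gets its own frame-number-ordered queue, and a buffer
// that becomes ready is only returned to the framework once all buffers for
// earlier frame numbers on the same stream have been sent, preserving
// per-stream in-order buffer delivery.
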
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera