/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
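// Convenience accessor: DATA_PTR(heap, i) resolves to the mapped CPU pointer for
// buffer i of a heap memory object (e.g. the dual camera command heap in openCamera()).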

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
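// Yields the number of entries in a statically sized mapping table,
// e.g. METADATA_MAP_SIZE(EFFECT_MODES_MAP) for the framework<->HAL enum maps below.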

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
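// register_stream_buffers and get_metadata_vendor_tag_ops are intentionally NULL:
// both entry points are deprecated for camera3 devices at HAL version 3.2 and above
// (this device reports 3.3/3.4 in the constructor below), so the framework does not call them.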

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
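// 0xDEADBEEF marks an invalid/unset session id; closeCamera() resets
// sessionId[mCameraId] back to it once the backend session goes away.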

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
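// Usage (see openCamera() below): logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// the boot-time-relative timestamp is only logged when gEaselProfilingEnabled is set.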

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = property_get_bool("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could break here to enforce that a ZSL stream set by the
                 * framework is always the full active array size, but it is not
                 * clear from the spec whether the framework will always follow
                 * that. We also have logic to override to the full array size,
                 * so keep the check lenient for now.
                 */
1307 }
1308 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1309 MAX_SIZES_CNT);
1310 for (size_t i = 0; i < count; i++) {
1311 if (((int32_t)rotatedWidth ==
1312 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1313 ((int32_t)rotatedHeight ==
1314 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1315 sizeFound = true;
1316 break;
1317 }
1318 }
1319 break;
1320 } /* End of switch(newStream->format) */
1321
1322 /* We error out even if a single stream has unsupported size set */
1323 if (!sizeFound) {
1324 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1325 rotatedWidth, rotatedHeight, newStream->format,
1326 gCamCapability[mCameraId]->active_array_size.width,
1327 gCamCapability[mCameraId]->active_array_size.height);
1328 rc = -EINVAL;
1329 break;
1330 }
1331 } /* End of for each stream */
1332 return rc;
1333}
1334
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001335/*===========================================================================
1336 * FUNCTION : validateUsageFlags
1337 *
1338 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1339 *
1340 * PARAMETERS :
1341 * @stream_list : streams to be configured
1342 *
1343 * RETURN :
1344 * NO_ERROR if the usage flags are supported
1345 * error code if usage flags are not supported
1346 *
1347 *==========================================================================*/
1348int QCamera3HardwareInterface::validateUsageFlags(
1349 const camera3_stream_configuration_t* streamList)
1350{
1351 for (size_t j = 0; j < streamList->num_streams; j++) {
1352 const camera3_stream_t *newStream = streamList->streams[j];
1353
1354 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1355 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1356 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1357 continue;
1358 }
1359
Jason Leec4cf5032017-05-24 18:31:41 -07001360 // Here we only care whether it's EIS3 or not
1361 char is_type_value[PROPERTY_VALUE_MAX];
1362 property_get("persist.camera.is_type", is_type_value, "4");
1363 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1364 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1365 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1366 isType = IS_TYPE_NONE;
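        // The IS type is resolved first because getStreamDefaultFormat() below takes it
        // into account when picking the underlying format for each usage.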
1367
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1369 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1370 bool isZSL = IS_USAGE_ZSL(newStream->usage);
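        // If video UBWC is disabled for a video usage, do not force UBWC on the preview
        // format either, so that shared video/preview usage can resolve to the same format.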
1371 bool forcePreviewUBWC = true;
1372 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1373 forcePreviewUBWC = false;
1374 }
1375 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001376 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001377 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001378 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001379 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001380 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001381
1382 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1383 // So color spaces will always match.
1384
1385 // Check whether underlying formats of shared streams match.
1386 if (isVideo && isPreview && videoFormat != previewFormat) {
1387 LOGE("Combined video and preview usage flag is not supported");
1388 return -EINVAL;
1389 }
1390 if (isPreview && isZSL && previewFormat != zslFormat) {
1391 LOGE("Combined preview and zsl usage flag is not supported");
1392 return -EINVAL;
1393 }
1394 if (isVideo && isZSL && videoFormat != zslFormat) {
1395 LOGE("Combined video and zsl usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 }
1399 return NO_ERROR;
1400}
1401
1402/*===========================================================================
1403 * FUNCTION : validateUsageFlagsForEis
1404 *
1405 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1406 *
1407 * PARAMETERS :
1408 * @stream_list : streams to be configured
1409 *
1410 * RETURN :
1411 * NO_ERROR if the usage flags are supported
1412 * error code if usage flags are not supported
1413 *
1414 *==========================================================================*/
1415int QCamera3HardwareInterface::validateUsageFlagsForEis(
1416 const camera3_stream_configuration_t* streamList)
1417{
1418 for (size_t j = 0; j < streamList->num_streams; j++) {
1419 const camera3_stream_t *newStream = streamList->streams[j];
1420
1421 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1422 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1423
1424         // Because EIS is "hard-coded" for certain use cases, and the current
1425         // implementation doesn't support sharing preview and video on the same
1426         // stream, return failure if EIS is forced on.
1427 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1428 LOGE("Combined video and preview usage flag is not supported due to EIS");
1429 return -EINVAL;
1430 }
1431 }
1432 return NO_ERROR;
1433}
1434
Thierry Strudel3d639192016-09-09 11:52:26 -07001435/*==============================================================================
1436 * FUNCTION : isSupportChannelNeeded
1437 *
1438 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1439 *
1440 * PARAMETERS :
1441 * @stream_list : streams to be configured
1442 * @stream_config_info : the config info for streams to be configured
1443 *
1444 * RETURN : Boolean true/false decision
1445 *
1446 *==========================================================================*/
1447bool QCamera3HardwareInterface::isSupportChannelNeeded(
1448 camera3_stream_configuration_t *streamList,
1449 cam_stream_size_info_t stream_config_info)
1450{
1451 uint32_t i;
1452 bool pprocRequested = false;
1453     /* Check for conditions where PProc pipeline does not have any streams */
1454 for (i = 0; i < stream_config_info.num_streams; i++) {
1455 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1456 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1457 pprocRequested = true;
1458 break;
1459 }
1460 }
1461
1462 if (pprocRequested == false )
1463 return true;
1464
1465     /* Dummy stream needed if only raw or JPEG streams are present */
1466 for (i = 0; i < streamList->num_streams; i++) {
1467 switch(streamList->streams[i]->format) {
1468 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1469 case HAL_PIXEL_FORMAT_RAW10:
1470 case HAL_PIXEL_FORMAT_RAW16:
1471 case HAL_PIXEL_FORMAT_BLOB:
1472 break;
1473 default:
1474 return false;
1475 }
1476 }
1477 return true;
1478}
1479
1480/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001483 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001484 *
1485 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001486 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001487 *
1488 * RETURN : int32_t type of status
1489 * NO_ERROR -- success
1490 * non-zero failure code
1491 *
1492 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001493int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001494{
1495 int32_t rc = NO_ERROR;
1496
1497 cam_dimension_t max_dim = {0, 0};
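    // Find the largest width and height requested across all configured streams; this
    // maximum dimension is sent to the backend so a suitable sensor mode can be selected.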
1498 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1499 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1500 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1501 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1502 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1503 }
1504
1505 clear_metadata_buffer(mParameters);
1506
1507 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1508 max_dim);
1509 if (rc != NO_ERROR) {
1510 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1511 return rc;
1512 }
1513
1514 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1515 if (rc != NO_ERROR) {
1516 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1517 return rc;
1518 }
1519
1520 clear_metadata_buffer(mParameters);
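    // With the maximum dimension applied, query which sensor mode was selected.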
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001521 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001522
1523 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1524 mParameters);
1525 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001526 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001527 return rc;
1528 }
1529
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001530 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001531 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1532 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1533 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1534 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1535 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001536
1537 return rc;
1538}
1539
1540/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001541 * FUNCTION : getCurrentSensorModeInfo
1542 *
1543 * DESCRIPTION: Get sensor mode information that is currently selected.
1544 *
1545 * PARAMETERS :
1546 * @sensorModeInfo : sensor mode information (output)
1547 *
1548 * RETURN : int32_t type of status
1549 * NO_ERROR -- success
1550 * non-zero failure code
1551 *
1552 *==========================================================================*/
1553int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1554{
1555 int32_t rc = NO_ERROR;
1556
1557 clear_metadata_buffer(mParameters);
1558 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1559
1560 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1561 mParameters);
1562 if (rc != NO_ERROR) {
1563         LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1564 return rc;
1565 }
1566
1567 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1568 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1569 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1570 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1571 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1572 sensorModeInfo.num_raw_bits);
1573
1574 return rc;
1575}
1576
1577/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001578 * FUNCTION : addToPPFeatureMask
1579 *
1580 * DESCRIPTION: add additional features to pp feature mask based on
1581 * stream type and usecase
1582 *
1583 * PARAMETERS :
1584 * @stream_format : stream format for feature mask
1585 * @stream_idx : stream idx within postprocess_mask list to change
1586 *
1587 * RETURN : None
1588 *
1589 *==========================================================================*/
1590void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1591 uint32_t stream_idx)
1592{
1593 char feature_mask_value[PROPERTY_VALUE_MAX];
1594 cam_feature_mask_t feature_mask;
1595 int args_converted;
1596 int property_len;
1597
1598 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001599#ifdef _LE_CAMERA_
1600 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1601 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1602 property_len = property_get("persist.camera.hal3.feature",
1603 feature_mask_value, swtnr_feature_mask_value);
1604#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001605 property_len = property_get("persist.camera.hal3.feature",
1606 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#endif
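    // The property value may be given either in hex (0x-prefixed) or in decimal,
    // e.g. "0x1" or "1" (illustrative values only). If parsing fails, an error is
    // logged and the postprocess mask is left untouched.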
Thierry Strudel3d639192016-09-09 11:52:26 -07001608 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1609 (feature_mask_value[1] == 'x')) {
1610 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1611 } else {
1612 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1613 }
1614 if (1 != args_converted) {
1615 feature_mask = 0;
1616 LOGE("Wrong feature mask %s", feature_mask_value);
1617 return;
1618 }
1619
1620 switch (stream_format) {
1621 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1622 /* Add LLVD to pp feature mask only if video hint is enabled */
1623 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1624 mStreamConfigInfo.postprocess_mask[stream_idx]
1625 |= CAM_QTI_FEATURE_SW_TNR;
1626 LOGH("Added SW TNR to pp feature mask");
1627 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1628 mStreamConfigInfo.postprocess_mask[stream_idx]
1629 |= CAM_QCOM_FEATURE_LLVD;
1630 LOGH("Added LLVD SeeMore to pp feature mask");
1631 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001632 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1633 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1634 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1635 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001636 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1637 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1638 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1639 CAM_QTI_FEATURE_BINNING_CORRECTION;
1640 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001641 break;
1642 }
1643 default:
1644 break;
1645 }
1646 LOGD("PP feature mask %llx",
1647 mStreamConfigInfo.postprocess_mask[stream_idx]);
1648}
1649
1650/*==============================================================================
1651 * FUNCTION : updateFpsInPreviewBuffer
1652 *
1653 * DESCRIPTION: update FPS information in preview buffer.
1654 *
1655 * PARAMETERS :
1656 * @metadata : pointer to metadata buffer
1657 * @frame_number: frame_number to look for in pending buffer list
1658 *
1659 * RETURN : None
1660 *
1661 *==========================================================================*/
1662void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1663 uint32_t frame_number)
1664{
1665 // Mark all pending buffers for this particular request
1666 // with corresponding framerate information
1667 for (List<PendingBuffersInRequest>::iterator req =
1668 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1669 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1670 for(List<PendingBufferInfo>::iterator j =
1671 req->mPendingBufferList.begin();
1672 j != req->mPendingBufferList.end(); j++) {
1673 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1674 if ((req->frame_number == frame_number) &&
1675 (channel->getStreamTypeMask() &
1676 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1677 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1678 CAM_INTF_PARM_FPS_RANGE, metadata) {
1679 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1680 struct private_handle_t *priv_handle =
1681 (struct private_handle_t *)(*(j->buffer));
1682 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1683 }
1684 }
1685 }
1686 }
1687}
1688
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001689/*==============================================================================
1690 * FUNCTION : updateTimeStampInPendingBuffers
1691 *
1692 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1693 * of a frame number
1694 *
1695 * PARAMETERS :
1696 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1697 * @timestamp : timestamp to be set
1698 *
1699 * RETURN : None
1700 *
1701 *==========================================================================*/
1702void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1703 uint32_t frameNumber, nsecs_t timestamp)
1704{
1705 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1706 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1707 if (req->frame_number != frameNumber)
1708 continue;
1709
1710 for (auto k = req->mPendingBufferList.begin();
1711 k != req->mPendingBufferList.end(); k++ ) {
1712 struct private_handle_t *priv_handle =
1713 (struct private_handle_t *) (*(k->buffer));
1714 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1715 }
1716 }
1717 return;
1718}
1719
Thierry Strudel3d639192016-09-09 11:52:26 -07001720/*===========================================================================
1721 * FUNCTION : configureStreams
1722 *
1723 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1724 * and output streams.
1725 *
1726 * PARAMETERS :
1727 * @stream_list : streams to be configured
1728 *
1729 * RETURN :
1730 *
1731 *==========================================================================*/
1732int QCamera3HardwareInterface::configureStreams(
1733 camera3_stream_configuration_t *streamList)
1734{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001735 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001736 int rc = 0;
1737
1738 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001739 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001740 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001741 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001742
1743 return rc;
1744}
1745
1746/*===========================================================================
1747 * FUNCTION : configureStreamsPerfLocked
1748 *
1749 * DESCRIPTION: configureStreams while perfLock is held.
1750 *
1751 * PARAMETERS :
1752 * @stream_list : streams to be configured
1753 *
1754 * RETURN : int32_t type of status
1755 * NO_ERROR -- success
1756 * non-zero failure code
1757 *==========================================================================*/
1758int QCamera3HardwareInterface::configureStreamsPerfLocked(
1759 camera3_stream_configuration_t *streamList)
1760{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001761 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001762 int rc = 0;
1763
1764 // Sanity check stream_list
1765 if (streamList == NULL) {
1766 LOGE("NULL stream configuration");
1767 return BAD_VALUE;
1768 }
1769 if (streamList->streams == NULL) {
1770 LOGE("NULL stream list");
1771 return BAD_VALUE;
1772 }
1773
1774 if (streamList->num_streams < 1) {
1775 LOGE("Bad number of streams requested: %d",
1776 streamList->num_streams);
1777 return BAD_VALUE;
1778 }
1779
1780 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1781 LOGE("Maximum number of streams %d exceeded: %d",
1782 MAX_NUM_STREAMS, streamList->num_streams);
1783 return BAD_VALUE;
1784 }
1785
Jason Leec4cf5032017-05-24 18:31:41 -07001786 mOpMode = streamList->operation_mode;
1787 LOGD("mOpMode: %d", mOpMode);
1788
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001789 rc = validateUsageFlags(streamList);
1790 if (rc != NO_ERROR) {
1791 return rc;
1792 }
1793
Thierry Strudel3d639192016-09-09 11:52:26 -07001794     /* first invalidate all the streams in the mStreamInfo list
1795 * if they appear again, they will be validated */
1796 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1797 it != mStreamInfo.end(); it++) {
1798 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1799 if (channel) {
1800 channel->stop();
1801 }
1802 (*it)->status = INVALID;
1803 }
1804
1805 if (mRawDumpChannel) {
1806 mRawDumpChannel->stop();
1807 delete mRawDumpChannel;
1808 mRawDumpChannel = NULL;
1809 }
1810
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001811 if (mHdrPlusRawSrcChannel) {
1812 mHdrPlusRawSrcChannel->stop();
1813 delete mHdrPlusRawSrcChannel;
1814 mHdrPlusRawSrcChannel = NULL;
1815 }
1816
Thierry Strudel3d639192016-09-09 11:52:26 -07001817 if (mSupportChannel)
1818 mSupportChannel->stop();
1819
1820 if (mAnalysisChannel) {
1821 mAnalysisChannel->stop();
1822 }
1823 if (mMetadataChannel) {
1824 /* If content of mStreamInfo is not 0, there is metadata stream */
1825         /* If mStreamInfo is not empty, there is a metadata stream */
1826 }
1827 if (mChannelHandle) {
1828 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001829 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 LOGD("stopping channel %d", mChannelHandle);
1831 }
1832
1833 pthread_mutex_lock(&mMutex);
1834
1835 // Check state
1836 switch (mState) {
1837 case INITIALIZED:
1838 case CONFIGURED:
1839 case STARTED:
1840 /* valid state */
1841 break;
1842 default:
1843 LOGE("Invalid state %d", mState);
1844 pthread_mutex_unlock(&mMutex);
1845 return -ENODEV;
1846 }
1847
1848 /* Check whether we have video stream */
1849 m_bIs4KVideo = false;
1850 m_bIsVideo = false;
1851 m_bEisSupportedSize = false;
1852 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001853 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001855 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001856 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001857 uint32_t videoWidth = 0U;
1858 uint32_t videoHeight = 0U;
1859 size_t rawStreamCnt = 0;
1860 size_t stallStreamCnt = 0;
1861 size_t processedStreamCnt = 0;
1862 // Number of streams on ISP encoder path
1863 size_t numStreamsOnEncoder = 0;
1864 size_t numYuv888OnEncoder = 0;
1865 bool bYuv888OverrideJpeg = false;
1866 cam_dimension_t largeYuv888Size = {0, 0};
1867 cam_dimension_t maxViewfinderSize = {0, 0};
1868 bool bJpegExceeds4K = false;
1869 bool bJpegOnEncoder = false;
1870 bool bUseCommonFeatureMask = false;
1871 cam_feature_mask_t commonFeatureMask = 0;
1872 bool bSmallJpegSize = false;
1873 uint32_t width_ratio;
1874 uint32_t height_ratio;
1875 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1876 camera3_stream_t *inputStream = NULL;
1877 bool isJpeg = false;
1878 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001879 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001880 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001881
1882 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1883
1884 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001885 uint8_t eis_prop_set;
1886 uint32_t maxEisWidth = 0;
1887 uint32_t maxEisHeight = 0;
1888
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001889 // Initialize all instant AEC related variables
1890 mInstantAEC = false;
1891 mResetInstantAEC = false;
1892 mInstantAECSettledFrameNumber = 0;
1893 mAecSkipDisplayFrameBound = 0;
1894 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001895 mCurrFeatureState = 0;
1896 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001897
Thierry Strudel3d639192016-09-09 11:52:26 -07001898 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1899
1900 size_t count = IS_TYPE_MAX;
1901 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1902 for (size_t i = 0; i < count; i++) {
1903 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001904 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1905 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906 break;
1907 }
1908 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001909
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001910 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001911 maxEisWidth = MAX_EIS_WIDTH;
1912 maxEisHeight = MAX_EIS_HEIGHT;
1913 }
1914
1915 /* EIS setprop control */
1916 char eis_prop[PROPERTY_VALUE_MAX];
1917 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001918 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001919 eis_prop_set = (uint8_t)atoi(eis_prop);
1920
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001921 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001922 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1923
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001924 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1925 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001926
Thierry Strudel3d639192016-09-09 11:52:26 -07001927 /* stream configurations */
1928 for (size_t i = 0; i < streamList->num_streams; i++) {
1929 camera3_stream_t *newStream = streamList->streams[i];
1930 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1931 "height = %d, rotation = %d, usage = 0x%x",
1932 i, newStream->stream_type, newStream->format,
1933 newStream->width, newStream->height, newStream->rotation,
1934 newStream->usage);
1935 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1936 newStream->stream_type == CAMERA3_STREAM_INPUT){
1937 isZsl = true;
1938 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001939 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1940 IS_USAGE_PREVIEW(newStream->usage)) {
1941 isPreview = true;
1942 }
1943
Thierry Strudel3d639192016-09-09 11:52:26 -07001944 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1945 inputStream = newStream;
1946 }
1947
Emilian Peev7650c122017-01-19 08:24:33 -08001948 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1949 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 isJpeg = true;
1951 jpegSize.width = newStream->width;
1952 jpegSize.height = newStream->height;
1953 if (newStream->width > VIDEO_4K_WIDTH ||
1954 newStream->height > VIDEO_4K_HEIGHT)
1955 bJpegExceeds4K = true;
1956 }
1957
1958 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1959 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1960 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001961 // In HAL3 we can have multiple different video streams.
1962 // The variables video width and height are used below as
1963 // dimensions of the biggest of them
1964 if (videoWidth < newStream->width ||
1965 videoHeight < newStream->height) {
1966 videoWidth = newStream->width;
1967 videoHeight = newStream->height;
1968 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001969 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1970 (VIDEO_4K_HEIGHT <= newStream->height)) {
1971 m_bIs4KVideo = true;
1972 }
1973 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1974 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001975
Thierry Strudel3d639192016-09-09 11:52:26 -07001976 }
1977 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1978 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1979 switch (newStream->format) {
1980 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001981 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1982 depthPresent = true;
1983 break;
1984 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001985 stallStreamCnt++;
1986 if (isOnEncoder(maxViewfinderSize, newStream->width,
1987 newStream->height)) {
1988 numStreamsOnEncoder++;
1989 bJpegOnEncoder = true;
1990 }
1991 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1992 newStream->width);
1993 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1994                         newStream->height);
1995 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1996 "FATAL: max_downscale_factor cannot be zero and so assert");
1997 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1998 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1999 LOGH("Setting small jpeg size flag to true");
2000 bSmallJpegSize = true;
2001 }
2002 break;
2003 case HAL_PIXEL_FORMAT_RAW10:
2004 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2005 case HAL_PIXEL_FORMAT_RAW16:
2006 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002007 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2008 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2009 pdStatCount++;
2010 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002011 break;
2012 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2013 processedStreamCnt++;
2014 if (isOnEncoder(maxViewfinderSize, newStream->width,
2015 newStream->height)) {
2016 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2017 !IS_USAGE_ZSL(newStream->usage)) {
2018 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2019 }
2020 numStreamsOnEncoder++;
2021 }
2022 break;
2023 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2024 processedStreamCnt++;
2025 if (isOnEncoder(maxViewfinderSize, newStream->width,
2026 newStream->height)) {
2027 // If Yuv888 size is not greater than 4K, set feature mask
2028                 // to SUPERSET so that it supports concurrent requests on
2029 // YUV and JPEG.
2030 if (newStream->width <= VIDEO_4K_WIDTH &&
2031 newStream->height <= VIDEO_4K_HEIGHT) {
2032 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2033 }
2034 numStreamsOnEncoder++;
2035 numYuv888OnEncoder++;
2036 largeYuv888Size.width = newStream->width;
2037 largeYuv888Size.height = newStream->height;
2038 }
2039 break;
2040 default:
2041 processedStreamCnt++;
2042 if (isOnEncoder(maxViewfinderSize, newStream->width,
2043 newStream->height)) {
2044 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2045 numStreamsOnEncoder++;
2046 }
2047 break;
2048 }
2049
2050 }
2051 }
2052
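    // EIS is only used for rear-camera video use cases; disable it for front and
    // front-aux cameras and for configurations without a video stream.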
2053 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2054 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2055 !m_bIsVideo) {
2056 m_bEisEnable = false;
2057 }
2058
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002059 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2060 pthread_mutex_unlock(&mMutex);
2061 return -EINVAL;
2062 }
2063
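    // debug.camera.tnr.forceenable forces TNR on, regardless of the stream
    // configuration checks below (intended as a debug aid).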
Thierry Strudel54dc9782017-02-15 12:12:10 -08002064 uint8_t forceEnableTnr = 0;
2065 char tnr_prop[PROPERTY_VALUE_MAX];
2066 memset(tnr_prop, 0, sizeof(tnr_prop));
2067 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2068 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2069
Thierry Strudel3d639192016-09-09 11:52:26 -07002070 /* Logic to enable/disable TNR based on specific config size/etc.*/
2071 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002072 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2073 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002074 else if (forceEnableTnr)
2075 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002076
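    // persist.camera.hdr.video enables video HDR for video configurations outside
    // constrained high speed mode.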
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002077 char videoHdrProp[PROPERTY_VALUE_MAX];
2078 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2079 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2080 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2081
2082 if (hdr_mode_prop == 1 && m_bIsVideo &&
2083 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2084 m_bVideoHdrEnabled = true;
2085 else
2086 m_bVideoHdrEnabled = false;
2087
2088
Thierry Strudel3d639192016-09-09 11:52:26 -07002089 /* Check if num_streams is sane */
2090 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2091 rawStreamCnt > MAX_RAW_STREAMS ||
2092 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2093         LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2094 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2095 pthread_mutex_unlock(&mMutex);
2096 return -EINVAL;
2097 }
2098 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002099 if (isZsl && m_bIs4KVideo) {
2100 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002101 pthread_mutex_unlock(&mMutex);
2102 return -EINVAL;
2103 }
2104 /* Check if stream sizes are sane */
2105 if (numStreamsOnEncoder > 2) {
2106 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2107 pthread_mutex_unlock(&mMutex);
2108 return -EINVAL;
2109 } else if (1 < numStreamsOnEncoder){
2110 bUseCommonFeatureMask = true;
2111 LOGH("Multiple streams above max viewfinder size, common mask needed");
2112 }
2113
2114 /* Check if BLOB size is greater than 4k in 4k recording case */
2115 if (m_bIs4KVideo && bJpegExceeds4K) {
2116 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2117 pthread_mutex_unlock(&mMutex);
2118 return -EINVAL;
2119 }
2120
Emilian Peev7650c122017-01-19 08:24:33 -08002121 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2122 depthPresent) {
2123 LOGE("HAL doesn't support depth streams in HFR mode!");
2124 pthread_mutex_unlock(&mMutex);
2125 return -EINVAL;
2126 }
2127
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2129 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2130 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2131 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2132 // configurations:
2133 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2134 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2135 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2136 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2137 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2138 __func__);
2139 pthread_mutex_unlock(&mMutex);
2140 return -EINVAL;
2141 }
2142
2143 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2144 // the YUV stream's size is greater or equal to the JPEG size, set common
2145 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2146 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2147 jpegSize.width, jpegSize.height) &&
2148 largeYuv888Size.width > jpegSize.width &&
2149 largeYuv888Size.height > jpegSize.height) {
2150 bYuv888OverrideJpeg = true;
2151 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2152 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2153 }
2154
2155 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2156 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2157 commonFeatureMask);
2158 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2159 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2160
2161 rc = validateStreamDimensions(streamList);
2162 if (rc == NO_ERROR) {
2163 rc = validateStreamRotations(streamList);
2164 }
2165 if (rc != NO_ERROR) {
2166 LOGE("Invalid stream configuration requested!");
2167 pthread_mutex_unlock(&mMutex);
2168 return rc;
2169 }
2170
Emilian Peev0f3c3162017-03-15 12:57:46 +00002171 if (1 < pdStatCount) {
2172 LOGE("HAL doesn't support multiple PD streams");
2173 pthread_mutex_unlock(&mMutex);
2174 return -EINVAL;
2175 }
2176
2177 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2178 (1 == pdStatCount)) {
2179 LOGE("HAL doesn't support PD streams in HFR mode!");
2180 pthread_mutex_unlock(&mMutex);
2181 return -EINVAL;
2182 }
2183
Thierry Strudel3d639192016-09-09 11:52:26 -07002184 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2185 for (size_t i = 0; i < streamList->num_streams; i++) {
2186 camera3_stream_t *newStream = streamList->streams[i];
2187 LOGH("newStream type = %d, stream format = %d "
2188 "stream size : %d x %d, stream rotation = %d",
2189 newStream->stream_type, newStream->format,
2190 newStream->width, newStream->height, newStream->rotation);
2191         // if the stream is already in mStreamInfo, validate it
2192 bool stream_exists = false;
2193 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2194 it != mStreamInfo.end(); it++) {
2195 if ((*it)->stream == newStream) {
2196 QCamera3ProcessingChannel *channel =
2197 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2198 stream_exists = true;
2199 if (channel)
2200 delete channel;
2201 (*it)->status = VALID;
2202 (*it)->stream->priv = NULL;
2203 (*it)->channel = NULL;
2204 }
2205 }
2206 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2207 //new stream
2208 stream_info_t* stream_info;
2209 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2210 if (!stream_info) {
2211 LOGE("Could not allocate stream info");
2212 rc = -ENOMEM;
2213 pthread_mutex_unlock(&mMutex);
2214 return rc;
2215 }
2216 stream_info->stream = newStream;
2217 stream_info->status = VALID;
2218 stream_info->channel = NULL;
2219 mStreamInfo.push_back(stream_info);
2220 }
2221 /* Covers Opaque ZSL and API1 F/W ZSL */
2222 if (IS_USAGE_ZSL(newStream->usage)
2223 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2224 if (zslStream != NULL) {
2225 LOGE("Multiple input/reprocess streams requested!");
2226 pthread_mutex_unlock(&mMutex);
2227 return BAD_VALUE;
2228 }
2229 zslStream = newStream;
2230 }
2231 /* Covers YUV reprocess */
2232 if (inputStream != NULL) {
2233 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2234 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2235 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2236 && inputStream->width == newStream->width
2237 && inputStream->height == newStream->height) {
2238 if (zslStream != NULL) {
2239                 /* This scenario indicates that multiple YUV streams with the same size
2240                  * as the input stream have been requested. Since the zsl stream handle
2241                  * is solely used for overriding the size of streams
2242                  * which share h/w streams, we just make a guess here as to
2243                  * which of the streams is the ZSL stream; this will be refactored
2244                  * once we have generic logic for streams sharing encoder output.
2245                  */
2246 LOGH("Warning, Multiple ip/reprocess streams requested!");
2247 }
2248 zslStream = newStream;
2249 }
2250 }
2251 }
2252
2253 /* If a zsl stream is set, we know that we have configured at least one input or
2254 bidirectional stream */
2255 if (NULL != zslStream) {
2256 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2257 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2258 mInputStreamInfo.format = zslStream->format;
2259 mInputStreamInfo.usage = zslStream->usage;
2260 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2261 mInputStreamInfo.dim.width,
2262 mInputStreamInfo.dim.height,
2263 mInputStreamInfo.format, mInputStreamInfo.usage);
2264 }
2265
2266 cleanAndSortStreamInfo();
2267 if (mMetadataChannel) {
2268 delete mMetadataChannel;
2269 mMetadataChannel = NULL;
2270 }
2271 if (mSupportChannel) {
2272 delete mSupportChannel;
2273 mSupportChannel = NULL;
2274 }
2275
2276 if (mAnalysisChannel) {
2277 delete mAnalysisChannel;
2278 mAnalysisChannel = NULL;
2279 }
2280
2281 if (mDummyBatchChannel) {
2282 delete mDummyBatchChannel;
2283 mDummyBatchChannel = NULL;
2284 }
2285
Emilian Peev7650c122017-01-19 08:24:33 -08002286 if (mDepthChannel) {
2287 mDepthChannel = NULL;
2288 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002289 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002290
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002291 mShutterDispatcher.clear();
2292 mOutputBufferDispatcher.clear();
2293
Thierry Strudel2896d122017-02-23 19:18:03 -08002294 char is_type_value[PROPERTY_VALUE_MAX];
2295 property_get("persist.camera.is_type", is_type_value, "4");
2296 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2297
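    // persist.camera.gzoom.at is a bitmask: bit 0 enables Google zoom on the video
    // stream and bit 1 on the preview stream, in both cases only for the back camera
    // (e.g. a value of 3 enables both). persist.camera.gzoom.4k additionally gates
    // whether it is applied to 4K video.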
Binhao Line406f062017-05-03 14:39:44 -07002298 char property_value[PROPERTY_VALUE_MAX];
2299 property_get("persist.camera.gzoom.at", property_value, "0");
2300 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002301 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2302 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2303 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2304 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002305
2306 property_get("persist.camera.gzoom.4k", property_value, "0");
2307 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2308
Thierry Strudel3d639192016-09-09 11:52:26 -07002309 //Create metadata channel and initialize it
2310 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2311 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2312 gCamCapability[mCameraId]->color_arrangement);
2313 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2314 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002315 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002316 if (mMetadataChannel == NULL) {
2317 LOGE("failed to allocate metadata channel");
2318 rc = -ENOMEM;
2319 pthread_mutex_unlock(&mMutex);
2320 return rc;
2321 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002322 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002323 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2324 if (rc < 0) {
2325 LOGE("metadata channel initialization failed");
2326 delete mMetadataChannel;
2327 mMetadataChannel = NULL;
2328 pthread_mutex_unlock(&mMutex);
2329 return rc;
2330 }
2331
Thierry Strudel2896d122017-02-23 19:18:03 -08002332 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002333 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002334 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002335 // Keep track of preview/video streams indices.
2336 // There could be more than one preview streams, but only one video stream.
2337 int32_t video_stream_idx = -1;
2338 int32_t preview_stream_idx[streamList->num_streams];
2339 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002340 bool previewTnr[streamList->num_streams];
2341 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2342 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2343 // Loop through once to determine preview TNR conditions before creating channels.
2344 for (size_t i = 0; i < streamList->num_streams; i++) {
2345 camera3_stream_t *newStream = streamList->streams[i];
2346 uint32_t stream_usage = newStream->usage;
2347 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2348 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2349 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2350 video_stream_idx = (int32_t)i;
2351 else
2352 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2353 }
2354 }
2355 // By default, preview stream TNR is disabled.
2356 // Enable TNR to the preview stream if all conditions below are satisfied:
2357 // 1. preview resolution == video resolution.
2358 // 2. video stream TNR is enabled.
2359 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2360 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2361 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2362 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2363 if (m_bTnrEnabled && m_bTnrVideo &&
2364 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2365 video_stream->width == preview_stream->width &&
2366 video_stream->height == preview_stream->height) {
2367 previewTnr[preview_stream_idx[i]] = true;
2368 }
2369 }
2370
Thierry Strudel3d639192016-09-09 11:52:26 -07002371 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2372 /* Allocate channel objects for the requested streams */
2373 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002374
Thierry Strudel3d639192016-09-09 11:52:26 -07002375 camera3_stream_t *newStream = streamList->streams[i];
2376 uint32_t stream_usage = newStream->usage;
2377 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2378 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2379 struct camera_info *p_info = NULL;
2380 pthread_mutex_lock(&gCamLock);
2381 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2382 pthread_mutex_unlock(&gCamLock);
2383 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2384 || IS_USAGE_ZSL(newStream->usage)) &&
2385 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002386 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
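            // Choose the postprocess mask for the ZSL snapshot stream: reuse the common
            // feature mask when this stream is on the encoder path and multiple encoder
            // streams need it, otherwise fall back to PP_SUPERSET or no postprocessing.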
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2389 if (bUseCommonFeatureMask)
2390 zsl_ppmask = commonFeatureMask;
2391 else
2392 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 if (numStreamsOnEncoder > 0)
2395 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2396 else
2397 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002399 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002401 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 LOGH("Input stream configured, reprocess config");
2403 } else {
2404 //for non zsl streams find out the format
2405 switch (newStream->format) {
2406 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2407 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002408 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002409 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2410 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2411 /* add additional features to pp feature mask */
2412 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2413 mStreamConfigInfo.num_streams);
2414
2415 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2416 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2417 CAM_STREAM_TYPE_VIDEO;
2418 if (m_bTnrEnabled && m_bTnrVideo) {
2419 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2420 CAM_QCOM_FEATURE_CPP_TNR;
2421 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2422 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2423 ~CAM_QCOM_FEATURE_CDS;
2424 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002425 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2427 CAM_QTI_FEATURE_PPEISCORE;
2428 }
Binhao Line406f062017-05-03 14:39:44 -07002429 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2430 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2431 CAM_QCOM_FEATURE_GOOG_ZOOM;
2432 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002433 } else {
2434 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2435 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002436 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2438 CAM_QCOM_FEATURE_CPP_TNR;
2439 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2440 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2441 ~CAM_QCOM_FEATURE_CDS;
2442 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002443 if(!m_bSwTnrPreview) {
2444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2445 ~CAM_QTI_FEATURE_SW_TNR;
2446 }
Binhao Line406f062017-05-03 14:39:44 -07002447 if (is_goog_zoom_preview_enabled) {
2448 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2449 CAM_QCOM_FEATURE_GOOG_ZOOM;
2450 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002451 padding_info.width_padding = mSurfaceStridePadding;
2452 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002453 previewSize.width = (int32_t)newStream->width;
2454 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002455 }
2456 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2457 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2458 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2459 newStream->height;
2460 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2461 newStream->width;
2462 }
2463 }
2464 break;
2465 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002466 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002467 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2468 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2469 if (bUseCommonFeatureMask)
2470 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2471 commonFeatureMask;
2472 else
2473 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2474 CAM_QCOM_FEATURE_NONE;
2475 } else {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2477 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2478 }
2479 break;
2480 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002481 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002482 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2483 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2484 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2485 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2486 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002487 /* Remove rotation if it is not supported
2488 for 4K LiveVideo snapshot case (online processing) */
2489 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2490 CAM_QCOM_FEATURE_ROTATION)) {
2491 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2492 &= ~CAM_QCOM_FEATURE_ROTATION;
2493 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002494 } else {
2495 if (bUseCommonFeatureMask &&
2496 isOnEncoder(maxViewfinderSize, newStream->width,
2497 newStream->height)) {
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2499 } else {
2500 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2501 }
2502 }
2503 if (isZsl) {
2504 if (zslStream) {
2505 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2506 (int32_t)zslStream->width;
2507 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2508 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2510 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002511 } else {
2512 LOGE("Error, No ZSL stream identified");
2513 pthread_mutex_unlock(&mMutex);
2514 return -EINVAL;
2515 }
2516 } else if (m_bIs4KVideo) {
2517 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2518 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2519 } else if (bYuv888OverrideJpeg) {
2520 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2521 (int32_t)largeYuv888Size.width;
2522 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2523 (int32_t)largeYuv888Size.height;
2524 }
2525 break;
2526 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2527 case HAL_PIXEL_FORMAT_RAW16:
2528 case HAL_PIXEL_FORMAT_RAW10:
2529 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2530 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2531 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002532 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2533 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2534 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2535 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2536 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2537 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2538 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2539 gCamCapability[mCameraId]->dt[mPDIndex];
2540 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2541 gCamCapability[mCameraId]->vc[mPDIndex];
2542 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002543 break;
2544 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002545 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002546 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2547 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2548 break;
2549 }
2550 }
2551
2552 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2553 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2554 gCamCapability[mCameraId]->color_arrangement);
2555
2556 if (newStream->priv == NULL) {
2557 //New stream, construct channel
2558 switch (newStream->stream_type) {
2559 case CAMERA3_STREAM_INPUT:
2560 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2561 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2562 break;
2563 case CAMERA3_STREAM_BIDIRECTIONAL:
2564 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2565 GRALLOC_USAGE_HW_CAMERA_WRITE;
2566 break;
2567 case CAMERA3_STREAM_OUTPUT:
2568                 /* For video encoding streams, set the read/write rarely
2569                  * flags so that the buffers may be allocated un-cached */
2570 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2571 newStream->usage |=
2572 (GRALLOC_USAGE_SW_READ_RARELY |
2573 GRALLOC_USAGE_SW_WRITE_RARELY |
2574 GRALLOC_USAGE_HW_CAMERA_WRITE);
2575 else if (IS_USAGE_ZSL(newStream->usage))
2576 {
2577 LOGD("ZSL usage flag skipping");
2578 }
2579 else if (newStream == zslStream
2580 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2581 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2582 } else
2583 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2584 break;
2585 default:
2586 LOGE("Invalid stream_type %d", newStream->stream_type);
2587 break;
2588 }
2589
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002590 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002591 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2592 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2593 QCamera3ProcessingChannel *channel = NULL;
2594 switch (newStream->format) {
2595 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2596 if ((newStream->usage &
2597 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2598 (streamList->operation_mode ==
2599 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2600 ) {
2601 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2602 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002603 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002604 this,
2605 newStream,
2606 (cam_stream_type_t)
2607 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2608 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2609 mMetadataChannel,
2610 0); //heap buffers are not required for HFR video channel
2611 if (channel == NULL) {
2612 LOGE("allocation of channel failed");
2613 pthread_mutex_unlock(&mMutex);
2614 return -ENOMEM;
2615 }
2616 //channel->getNumBuffers() will return 0 here so use
2617 //MAX_INFLIGHT_HFR_REQUESTS
2618 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2619 newStream->priv = channel;
2620 LOGI("num video buffers in HFR mode: %d",
2621 MAX_INFLIGHT_HFR_REQUESTS);
2622 } else {
2623 /* Copy stream contents in the HFR preview-only case to create a
2624 * dummy batch channel so that sensor streaming is in
2625 * HFR mode */
2626 if (!m_bIsVideo && (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2628 mDummyBatchStream = *newStream;
2629 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002630 int bufferCount = MAX_INFLIGHT_REQUESTS;
2631 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2632 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002633 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2634 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2635 bufferCount = m_bIs4KVideo ?
2636 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2637 }
2638
Thierry Strudel2896d122017-02-23 19:18:03 -08002639 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002640 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2641 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002642 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002643 this,
2644 newStream,
2645 (cam_stream_type_t)
2646 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2647 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2648 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002649 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002650 if (channel == NULL) {
2651 LOGE("allocation of channel failed");
2652 pthread_mutex_unlock(&mMutex);
2653 return -ENOMEM;
2654 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002655 /* disable UBWC for preview, though supported,
2656 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002657 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 (previewSize.width == (int32_t)videoWidth)&&
2659 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002660 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002661 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002662 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002663 /* When goog_zoom is linked to the preview or video stream,
2664 * disable ubwc to the linked stream */
2665 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2666 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2667 channel->setUBWCEnabled(false);
2668 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002669 newStream->max_buffers = channel->getNumBuffers();
2670 newStream->priv = channel;
2671 }
2672 break;
2673 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2674 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2675 mChannelHandle,
2676 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002677 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 this,
2679 newStream,
2680 (cam_stream_type_t)
2681 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2682 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2683 mMetadataChannel);
2684 if (channel == NULL) {
2685 LOGE("allocation of YUV channel failed");
2686 pthread_mutex_unlock(&mMutex);
2687 return -ENOMEM;
2688 }
2689 newStream->max_buffers = channel->getNumBuffers();
2690 newStream->priv = channel;
2691 break;
2692 }
2693 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2694 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002695 case HAL_PIXEL_FORMAT_RAW10: {
2696 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2697 (HAL_DATASPACE_DEPTH != newStream->data_space))
2698 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002699 mRawChannel = new QCamera3RawChannel(
2700 mCameraHandle->camera_handle, mChannelHandle,
2701 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002702 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002703 this, newStream,
2704 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002705 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 if (mRawChannel == NULL) {
2707 LOGE("allocation of raw channel failed");
2708 pthread_mutex_unlock(&mMutex);
2709 return -ENOMEM;
2710 }
2711 newStream->max_buffers = mRawChannel->getNumBuffers();
2712 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2713 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002714 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002715 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002716 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2717 mDepthChannel = new QCamera3DepthChannel(
2718 mCameraHandle->camera_handle, mChannelHandle,
2719 mCameraHandle->ops, NULL, NULL, &padding_info,
2720 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2721 mMetadataChannel);
2722 if (NULL == mDepthChannel) {
2723 LOGE("Allocation of depth channel failed");
2724 pthread_mutex_unlock(&mMutex);
2725 return NO_MEMORY;
2726 }
2727 newStream->priv = mDepthChannel;
2728 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2729 } else {
2730 // Max number of inflight live snapshot buffers is 1. This is to mitigate
2731 // frame drop issues for video snapshot. The more buffers being
2732 // allocated, the more frame drops there are.
2733 mPictureChannel = new QCamera3PicChannel(
2734 mCameraHandle->camera_handle, mChannelHandle,
2735 mCameraHandle->ops, captureResultCb,
2736 setBufferErrorStatus, &padding_info, this, newStream,
2737 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2738 m_bIs4KVideo, isZsl, mMetadataChannel,
2739 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2740 if (mPictureChannel == NULL) {
2741 LOGE("allocation of channel failed");
2742 pthread_mutex_unlock(&mMutex);
2743 return -ENOMEM;
2744 }
2745 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2746 newStream->max_buffers = mPictureChannel->getNumBuffers();
2747 mPictureChannel->overrideYuvSize(
2748 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2749 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002750 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002751 break;
2752
2753 default:
2754 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002755 pthread_mutex_unlock(&mMutex);
2756 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002757 }
2758 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2759 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2760 } else {
2761 LOGE("Error, Unknown stream type");
2762 pthread_mutex_unlock(&mMutex);
2763 return -EINVAL;
2764 }
2765
2766 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002767 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002768 // Here we only care whether it's EIS3 or not
2769 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2770 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2771 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2772 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002773 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002774 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002775 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002776 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2777 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2778 }
2779 }
2780
2781 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2782 it != mStreamInfo.end(); it++) {
2783 if ((*it)->stream == newStream) {
2784 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2785 break;
2786 }
2787 }
2788 } else {
2789 // Channel already exists for this stream
2790 // Do nothing for now
2791 }
2792 padding_info = gCamCapability[mCameraId]->padding_info;
2793
Emilian Peev7650c122017-01-19 08:24:33 -08002794 /* Do not add entries for input & depth streams in meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002795 * since there is no real stream associated with them
2796 */
Emilian Peev7650c122017-01-19 08:24:33 -08002797 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002798 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2799 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002800 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002801 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 }
2803
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002804 // Let buffer dispatcher know the configured streams.
2805 mOutputBufferDispatcher.configureStreams(streamList);
2806
Thierry Strudel2896d122017-02-23 19:18:03 -08002807 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2808 onlyRaw = false;
2809 }
2810
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002811 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002812 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002813 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002814 cam_analysis_info_t analysisInfo;
2815 int32_t ret = NO_ERROR;
2816 ret = mCommon.getAnalysisInfo(
2817 FALSE,
2818 analysisFeatureMask,
2819 &analysisInfo);
2820 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002821 cam_color_filter_arrangement_t analysis_color_arrangement =
2822 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2823 CAM_FILTER_ARRANGEMENT_Y :
2824 gCamCapability[mCameraId]->color_arrangement);
2825 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2826 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002827 cam_dimension_t analysisDim;
2828 analysisDim = mCommon.getMatchingDimension(previewSize,
2829 analysisInfo.analysis_recommended_res);
2830
2831 mAnalysisChannel = new QCamera3SupportChannel(
2832 mCameraHandle->camera_handle,
2833 mChannelHandle,
2834 mCameraHandle->ops,
2835 &analysisInfo.analysis_padding_info,
2836 analysisFeatureMask,
2837 CAM_STREAM_TYPE_ANALYSIS,
2838 &analysisDim,
2839 (analysisInfo.analysis_format
2840 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2841 : CAM_FORMAT_YUV_420_NV21),
2842 analysisInfo.hw_analysis_supported,
2843 gCamCapability[mCameraId]->color_arrangement,
2844 this,
2845 0); // force buffer count to 0
2846 } else {
2847 LOGW("getAnalysisInfo failed, ret = %d", ret);
2848 }
2849 if (!mAnalysisChannel) {
2850 LOGW("Analysis channel cannot be created");
2851 }
2852 }
2853
Thierry Strudel3d639192016-09-09 11:52:26 -07002854 //RAW DUMP channel
2855 if (mEnableRawDump && isRawStreamRequested == false){
2856 cam_dimension_t rawDumpSize;
2857 rawDumpSize = getMaxRawSize(mCameraId);
2858 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2859 setPAAFSupport(rawDumpFeatureMask,
2860 CAM_STREAM_TYPE_RAW,
2861 gCamCapability[mCameraId]->color_arrangement);
2862 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2863 mChannelHandle,
2864 mCameraHandle->ops,
2865 rawDumpSize,
2866 &padding_info,
2867 this, rawDumpFeatureMask);
2868 if (!mRawDumpChannel) {
2869 LOGE("Raw Dump channel cannot be created");
2870 pthread_mutex_unlock(&mMutex);
2871 return -ENOMEM;
2872 }
2873 }
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 if (mAnalysisChannel) {
2876 cam_analysis_info_t analysisInfo;
2877 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2878 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2879 CAM_STREAM_TYPE_ANALYSIS;
2880 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2881 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002882 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002883 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2884 &analysisInfo);
2885 if (rc != NO_ERROR) {
2886 LOGE("getAnalysisInfo failed, ret = %d", rc);
2887 pthread_mutex_unlock(&mMutex);
2888 return rc;
2889 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002890 cam_color_filter_arrangement_t analysis_color_arrangement =
2891 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2892 CAM_FILTER_ARRANGEMENT_Y :
2893 gCamCapability[mCameraId]->color_arrangement);
2894 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2895 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2896 analysis_color_arrangement);
2897
Thierry Strudel3d639192016-09-09 11:52:26 -07002898 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002899 mCommon.getMatchingDimension(previewSize,
2900 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002901 mStreamConfigInfo.num_streams++;
2902 }
2903
Thierry Strudel2896d122017-02-23 19:18:03 -08002904 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002905 cam_analysis_info_t supportInfo;
2906 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2907 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2908 setPAAFSupport(callbackFeatureMask,
2909 CAM_STREAM_TYPE_CALLBACK,
2910 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002911 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002912 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002913 if (ret != NO_ERROR) {
2914 /* Ignore the error for Mono camera
2915 * because the PAAF bit mask is only set
2916 * for CAM_STREAM_TYPE_ANALYSIS stream type
2917 */
2918 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2919 LOGW("getAnalysisInfo failed, ret = %d", ret);
2920 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002921 }
2922 mSupportChannel = new QCamera3SupportChannel(
2923 mCameraHandle->camera_handle,
2924 mChannelHandle,
2925 mCameraHandle->ops,
2926 &gCamCapability[mCameraId]->padding_info,
2927 callbackFeatureMask,
2928 CAM_STREAM_TYPE_CALLBACK,
2929 &QCamera3SupportChannel::kDim,
2930 CAM_FORMAT_YUV_420_NV21,
2931 supportInfo.hw_analysis_supported,
2932 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002933 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002934 if (!mSupportChannel) {
2935 LOGE("dummy channel cannot be created");
2936 pthread_mutex_unlock(&mMutex);
2937 return -ENOMEM;
2938 }
2939 }
2940
2941 if (mSupportChannel) {
2942 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2943 QCamera3SupportChannel::kDim;
2944 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2945 CAM_STREAM_TYPE_CALLBACK;
2946 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2947 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2948 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2949 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2950 gCamCapability[mCameraId]->color_arrangement);
2951 mStreamConfigInfo.num_streams++;
2952 }
2953
2954 if (mRawDumpChannel) {
2955 cam_dimension_t rawSize;
2956 rawSize = getMaxRawSize(mCameraId);
2957 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2958 rawSize;
2959 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2960 CAM_STREAM_TYPE_RAW;
2961 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2962 CAM_QCOM_FEATURE_NONE;
2963 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2964 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2965 gCamCapability[mCameraId]->color_arrangement);
2966 mStreamConfigInfo.num_streams++;
2967 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002968
2969 if (mHdrPlusRawSrcChannel) {
2970 cam_dimension_t rawSize;
2971 rawSize = getMaxRawSize(mCameraId);
2972 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2973 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2975 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2976 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2977 gCamCapability[mCameraId]->color_arrangement);
2978 mStreamConfigInfo.num_streams++;
2979 }
2980
Thierry Strudel3d639192016-09-09 11:52:26 -07002981 /* In HFR mode, if no video stream is added, create a dummy channel so that
2982 * the ISP can run in batch mode even for the preview-only case. This channel is
2983 * never 'start'ed (no stream-on), it is only 'initialized' */
2984 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2985 !m_bIsVideo) {
2986 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2987 setPAAFSupport(dummyFeatureMask,
2988 CAM_STREAM_TYPE_VIDEO,
2989 gCamCapability[mCameraId]->color_arrangement);
2990 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2991 mChannelHandle,
2992 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002993 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002994 this,
2995 &mDummyBatchStream,
2996 CAM_STREAM_TYPE_VIDEO,
2997 dummyFeatureMask,
2998 mMetadataChannel);
2999 if (NULL == mDummyBatchChannel) {
3000 LOGE("creation of mDummyBatchChannel failed."
3001 "Preview will use non-hfr sensor mode ");
3002 }
3003 }
3004 if (mDummyBatchChannel) {
3005 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3006 mDummyBatchStream.width;
3007 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3008 mDummyBatchStream.height;
3009 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3010 CAM_STREAM_TYPE_VIDEO;
3011 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3012 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3013 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3014 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3015 gCamCapability[mCameraId]->color_arrangement);
3016 mStreamConfigInfo.num_streams++;
3017 }
3018
3019 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3020 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003021 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003022 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003023
3024 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3025 for (pendingRequestIterator i = mPendingRequestsList.begin();
3026 i != mPendingRequestsList.end();) {
3027 i = erasePendingRequest(i);
3028 }
3029 mPendingFrameDropList.clear();
3030 // Initialize/Reset the pending buffers list
3031 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3032 req.mPendingBufferList.clear();
3033 }
3034 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003035 mExpectedInflightDuration = 0;
3036 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003037
Thierry Strudel3d639192016-09-09 11:52:26 -07003038 mCurJpegMeta.clear();
3039 //Get min frame duration for this streams configuration
3040 deriveMinFrameDuration();
3041
Chien-Yu Chenee335912017-02-09 17:53:20 -08003042 mFirstPreviewIntentSeen = false;
3043
3044 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003045 {
3046 Mutex::Autolock l(gHdrPlusClientLock);
3047 disableHdrPlusModeLocked();
3048 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003049
Thierry Strudel3d639192016-09-09 11:52:26 -07003050 // Update state
3051 mState = CONFIGURED;
3052
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003053 mFirstMetadataCallback = true;
3054
Thierry Strudel3d639192016-09-09 11:52:26 -07003055 pthread_mutex_unlock(&mMutex);
3056
3057 return rc;
3058}
3059
3060/*===========================================================================
3061 * FUNCTION : validateCaptureRequest
3062 *
3063 * DESCRIPTION: validate a capture request from camera service
3064 *
3065 * PARAMETERS :
3066 * @request : request from framework to process
3067 *
3068 * RETURN :
3069 *
3070 *==========================================================================*/
3071int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003072 camera3_capture_request_t *request,
3073 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003074{
3075 ssize_t idx = 0;
3076 const camera3_stream_buffer_t *b;
3077 CameraMetadata meta;
3078
3079 /* Sanity check the request */
3080 if (request == NULL) {
3081 LOGE("NULL capture request");
3082 return BAD_VALUE;
3083 }
3084
3085 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3086 /*settings cannot be null for the first request*/
3087 return BAD_VALUE;
3088 }
3089
3090 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003091 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3092 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003093 LOGE("Request %d: No output buffers provided!",
3094 frameNumber);
3095 return BAD_VALUE;
3096 }
3097 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3098 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3099 request->num_output_buffers, MAX_NUM_STREAMS);
3100 return BAD_VALUE;
3101 }
3102 if (request->input_buffer != NULL) {
3103 b = request->input_buffer;
3104 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3105 LOGE("Request %d: Buffer %ld: Status not OK!",
3106 frameNumber, (long)idx);
3107 return BAD_VALUE;
3108 }
3109 if (b->release_fence != -1) {
3110 LOGE("Request %d: Buffer %ld: Has a release fence!",
3111 frameNumber, (long)idx);
3112 return BAD_VALUE;
3113 }
3114 if (b->buffer == NULL) {
3115 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3116 frameNumber, (long)idx);
3117 return BAD_VALUE;
3118 }
3119 }
3120
3121 // Validate all buffers
3122 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003123 if (b == NULL) {
3124 return BAD_VALUE;
3125 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003126 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003127 QCamera3ProcessingChannel *channel =
3128 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3129 if (channel == NULL) {
3130 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3131 frameNumber, (long)idx);
3132 return BAD_VALUE;
3133 }
3134 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3135 LOGE("Request %d: Buffer %ld: Status not OK!",
3136 frameNumber, (long)idx);
3137 return BAD_VALUE;
3138 }
3139 if (b->release_fence != -1) {
3140 LOGE("Request %d: Buffer %ld: Has a release fence!",
3141 frameNumber, (long)idx);
3142 return BAD_VALUE;
3143 }
3144 if (b->buffer == NULL) {
3145 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3146 frameNumber, (long)idx);
3147 return BAD_VALUE;
3148 }
3149 if (*(b->buffer) == NULL) {
3150 LOGE("Request %d: Buffer %ld: NULL private handle!",
3151 frameNumber, (long)idx);
3152 return BAD_VALUE;
3153 }
3154 idx++;
3155 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003157 return NO_ERROR;
3158}
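
/* Illustrative sketch (hypothetical values, not part of this file): the cheapest
 * rejection path above is a request that carries neither output buffers nor
 * internally requested streams.
 *
 *   camera3_capture_request_t req = {};
 *   req.frame_number = 42;
 *   req.settings = previousSettings; // may be NULL only after the first request
 *   req.num_output_buffers = 0;      // with no internal streams -> BAD_VALUE
 */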
3159
3160/*===========================================================================
3161 * FUNCTION : deriveMinFrameDuration
3162 *
3163 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3164 * on currently configured streams.
3165 *
3166 * PARAMETERS : NONE
3167 *
3168 * RETURN : NONE
3169 *
3170 *==========================================================================*/
3171void QCamera3HardwareInterface::deriveMinFrameDuration()
3172{
3173 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003174 bool hasRaw = false;
3175
3176 mMinRawFrameDuration = 0;
3177 mMinJpegFrameDuration = 0;
3178 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003179
3180 maxJpegDim = 0;
3181 maxProcessedDim = 0;
3182 maxRawDim = 0;
3183
3184 // Figure out maximum jpeg, processed, and raw dimensions
3185 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3186 it != mStreamInfo.end(); it++) {
3187
3188 // Input stream doesn't have valid stream_type
3189 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3190 continue;
3191
3192 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3193 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3194 if (dimension > maxJpegDim)
3195 maxJpegDim = dimension;
3196 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3197 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3198 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003199 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003200 if (dimension > maxRawDim)
3201 maxRawDim = dimension;
3202 } else {
3203 if (dimension > maxProcessedDim)
3204 maxProcessedDim = dimension;
3205 }
3206 }
3207
3208 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3209 MAX_SIZES_CNT);
3210
3211 //Assume all jpeg dimensions are in processed dimensions.
3212 if (maxJpegDim > maxProcessedDim)
3213 maxProcessedDim = maxJpegDim;
3214 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003215 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003216 maxRawDim = INT32_MAX;
3217
3218 for (size_t i = 0; i < count; i++) {
3219 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3220 gCamCapability[mCameraId]->raw_dim[i].height;
3221 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3222 maxRawDim = dimension;
3223 }
3224 }
3225
3226 //Find minimum durations for processed, jpeg, and raw
3227 for (size_t i = 0; i < count; i++) {
3228 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3229 gCamCapability[mCameraId]->raw_dim[i].height) {
3230 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3231 break;
3232 }
3233 }
3234 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3235 for (size_t i = 0; i < count; i++) {
3236 if (maxProcessedDim ==
3237 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3238 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3239 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3240 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3241 break;
3242 }
3243 }
3244}
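
/* Illustrative sketch of the table lookup above (hypothetical values, not part of
 * this HAL's build): the largest processed/JPEG dimension is matched against the
 * capability table to pick up its minimum frame duration.
 *
 *   struct Entry { int32_t w, h; int64_t minDurationNs; };
 *   Entry pictureTbl[] = { {4032, 3024, 33333333}, {1920, 1080, 33333333} };
 *   int32_t maxProcessedDim = 4032 * 3024;  // largest JPEG/processed stream area
 *   int64_t minProcessedNs = 0;
 *   for (const Entry &e : pictureTbl) {
 *       if (maxProcessedDim == e.w * e.h) { minProcessedNs = e.minDurationNs; break; }
 *   }
 *   // minProcessedNs == 33333333 ns, i.e. this configuration tops out near 30 fps.
 */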
3245
3246/*===========================================================================
3247 * FUNCTION : getMinFrameDuration
3248 *
3249 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3250 * of the currently configured streams and the current request configuration.
3251 *
3252 * PARAMETERS : @request: request sent by the framework
3253 *
3254 * RETURN : min frame duration for a particular request
3255 *
3256 *==========================================================================*/
3257int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3258{
3259 bool hasJpegStream = false;
3260 bool hasRawStream = false;
3261 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3262 const camera3_stream_t *stream = request->output_buffers[i].stream;
3263 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3264 hasJpegStream = true;
3265 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3266 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3267 stream->format == HAL_PIXEL_FORMAT_RAW16)
3268 hasRawStream = true;
3269 }
3270
3271 if (!hasJpegStream)
3272 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3273 else
3274 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3275}
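
/* For example (hypothetical values): with mMinRawFrameDuration = 50 ms,
 * mMinProcessedFrameDuration = 33 ms and mMinJpegFrameDuration = 66 ms, a request
 * containing a BLOB (JPEG) buffer yields 66 ms (~15 fps), while any other request
 * yields MAX(raw, processed) = 50 ms, whether or not it includes a RAW buffer. */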
3276
3277/*===========================================================================
3278 * FUNCTION : handleBuffersDuringFlushLock
3279 *
3280 * DESCRIPTION: Account for buffers returned from back-end during flush
3281 * This function is executed while mMutex is held by the caller.
3282 *
3283 * PARAMETERS :
3284 * @buffer: image buffer for the callback
3285 *
3286 * RETURN :
3287 *==========================================================================*/
3288void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3289{
3290 bool buffer_found = false;
3291 for (List<PendingBuffersInRequest>::iterator req =
3292 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3293 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3294 for (List<PendingBufferInfo>::iterator i =
3295 req->mPendingBufferList.begin();
3296 i != req->mPendingBufferList.end(); i++) {
3297 if (i->buffer == buffer->buffer) {
3298 mPendingBuffersMap.numPendingBufsAtFlush--;
3299 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3300 buffer->buffer, req->frame_number,
3301 mPendingBuffersMap.numPendingBufsAtFlush);
3302 buffer_found = true;
3303 break;
3304 }
3305 }
3306 if (buffer_found) {
3307 break;
3308 }
3309 }
3310 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3311 //signal the flush()
3312 LOGD("All buffers returned to HAL. Continue flush");
3313 pthread_cond_signal(&mBuffersCond);
3314 }
3315}
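
/* Context sketch (an assumption based on the condition variable signalled above, not
 * a verbatim copy of the flush path): flush() snapshots numPendingBufsAtFlush and
 * then blocks on mBuffersCond under mMutex until this handler has counted every
 * outstanding buffer back in, roughly:
 *
 *   pthread_mutex_lock(&mMutex);
 *   while (mPendingBuffersMap.numPendingBufsAtFlush > 0)
 *       pthread_cond_wait(&mBuffersCond, &mMutex); // the real code may use a timed wait
 *   pthread_mutex_unlock(&mMutex);
 */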
3316
Thierry Strudel3d639192016-09-09 11:52:26 -07003317/*===========================================================================
3318 * FUNCTION : handleBatchMetadata
3319 *
3320 * DESCRIPTION: Handles metadata buffer callback in batch mode
3321 *
3322 * PARAMETERS : @metadata_buf: metadata buffer
3323 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3324 * the meta buf in this method
3325 *
3326 * RETURN :
3327 *
3328 *==========================================================================*/
3329void QCamera3HardwareInterface::handleBatchMetadata(
3330 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3331{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003332 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003333
3334 if (NULL == metadata_buf) {
3335 LOGE("metadata_buf is NULL");
3336 return;
3337 }
3338 /* In batch mode, the metadata will contain the frame number and timestamp of
3339 * the last frame in the batch. E.g.: a batch containing buffers from requests
3340 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3341 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3342 * multiple process_capture_results */
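    /* For illustration (hypothetical numbers): with requests 5..8 batched, the lookup
     * below yields first_frame_number = 5, so frameNumDiff = 8 + 1 - 5 = 4 and the
     * loop issues four inferred results, frame_number = 5, 6, 7, 8. */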
3343 metadata_buffer_t *metadata =
3344 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3345 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3346 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3347 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3348 uint32_t frame_number = 0, urgent_frame_number = 0;
3349 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3350 bool invalid_metadata = false;
3351 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3352 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003353 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003354
3355 int32_t *p_frame_number_valid =
3356 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3357 uint32_t *p_frame_number =
3358 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3359 int64_t *p_capture_time =
3360 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3361 int32_t *p_urgent_frame_number_valid =
3362 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3363 uint32_t *p_urgent_frame_number =
3364 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3365
3366 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3367 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3368 (NULL == p_urgent_frame_number)) {
3369 LOGE("Invalid metadata");
3370 invalid_metadata = true;
3371 } else {
3372 frame_number_valid = *p_frame_number_valid;
3373 last_frame_number = *p_frame_number;
3374 last_frame_capture_time = *p_capture_time;
3375 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3376 last_urgent_frame_number = *p_urgent_frame_number;
3377 }
3378
3379 /* In batch mode, when no video buffers are requested, set_parms are sent
3380 * for every capture_request. The difference between consecutive urgent
3381 * frame numbers and frame numbers should be used to interpolate the
3382 * corresponding frame numbers and time stamps */
3383 pthread_mutex_lock(&mMutex);
3384 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003385 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3386 if(idx < 0) {
3387 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3388 last_urgent_frame_number);
3389 mState = ERROR;
3390 pthread_mutex_unlock(&mMutex);
3391 return;
3392 }
3393 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003394 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3395 first_urgent_frame_number;
3396
3397 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3398 urgent_frame_number_valid,
3399 first_urgent_frame_number, last_urgent_frame_number);
3400 }
3401
3402 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003403 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3404 if(idx < 0) {
3405 LOGE("Invalid frame number received: %d. Irrecoverable error",
3406 last_frame_number);
3407 mState = ERROR;
3408 pthread_mutex_unlock(&mMutex);
3409 return;
3410 }
3411 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003412 frameNumDiff = last_frame_number + 1 -
3413 first_frame_number;
3414 mPendingBatchMap.removeItem(last_frame_number);
3415
3416 LOGD("frm: valid: %d frm_num: %d - %d",
3417 frame_number_valid,
3418 first_frame_number, last_frame_number);
3419
3420 }
3421 pthread_mutex_unlock(&mMutex);
3422
3423 if (urgent_frame_number_valid || frame_number_valid) {
3424 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3425 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3426 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3427 urgentFrameNumDiff, last_urgent_frame_number);
3428 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3429 LOGE("frameNumDiff: %d frameNum: %d",
3430 frameNumDiff, last_frame_number);
3431 }
3432
3433 for (size_t i = 0; i < loopCount; i++) {
3434 /* handleMetadataWithLock is called even for invalid_metadata for
3435 * pipeline depth calculation */
3436 if (!invalid_metadata) {
3437 /* Infer frame number. Batch metadata contains frame number of the
3438 * last frame */
3439 if (urgent_frame_number_valid) {
3440 if (i < urgentFrameNumDiff) {
3441 urgent_frame_number =
3442 first_urgent_frame_number + i;
3443 LOGD("inferred urgent frame_number: %d",
3444 urgent_frame_number);
3445 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3446 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3447 } else {
3448 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3449 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3450 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3451 }
3452 }
3453
3454 /* Infer frame number. Batch metadata contains frame number of the
3455 * last frame */
3456 if (frame_number_valid) {
3457 if (i < frameNumDiff) {
3458 frame_number = first_frame_number + i;
3459 LOGD("inferred frame_number: %d", frame_number);
3460 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3461 CAM_INTF_META_FRAME_NUMBER, frame_number);
3462 } else {
3463 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3464 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3465 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3466 }
3467 }
3468
3469 if (last_frame_capture_time) {
3470 //Infer timestamp
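                //E.g. (hypothetical): for a batch of 4 at mHFRVideoFps = 120, the first
                //timestamp is last_frame_capture_time - 3 * (NSEC_PER_SEC / 120), i.e.
                //~25 ms earlier, and successive entries are spaced ~8.33 ms apart.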
3471 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003472 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003473 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003474 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003475 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3476 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3477 LOGD("batch capture_time: %lld, capture_time: %lld",
3478 last_frame_capture_time, capture_time);
3479 }
3480 }
3481 pthread_mutex_lock(&mMutex);
3482 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003483 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003484 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3485 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003486 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003487 pthread_mutex_unlock(&mMutex);
3488 }
3489
3490 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003491 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003492 mMetadataChannel->bufDone(metadata_buf);
3493 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003494 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 }
3496}
3497
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003498void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3499 camera3_error_msg_code_t errorCode)
3500{
3501 camera3_notify_msg_t notify_msg;
3502 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3503 notify_msg.type = CAMERA3_MSG_ERROR;
3504 notify_msg.message.error.error_code = errorCode;
3505 notify_msg.message.error.error_stream = NULL;
3506 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003507 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003508
3509 return;
3510}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003511
3512/*===========================================================================
3513 * FUNCTION : sendPartialMetadataWithLock
3514 *
3515 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3516 *
3517 * PARAMETERS : @metadata: metadata buffer
3518 * @requestIter: The iterator for the pending capture request for
3519 * which the partial result is being sent
3520 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3521 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003522 * @isJumpstartMetadata: Whether this is a partial metadata for
3523 * jumpstart, i.e. even though it doesn't map to a valid partial
3524 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003525 *
3526 * RETURN :
3527 *
3528 *==========================================================================*/
3529
3530void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3531 metadata_buffer_t *metadata,
3532 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003533 bool lastUrgentMetadataInBatch,
3534 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003535{
3536 camera3_capture_result_t result;
3537 memset(&result, 0, sizeof(camera3_capture_result_t));
3538
3539 requestIter->partial_result_cnt++;
3540
3541 // Extract 3A metadata
3542 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003543 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3544 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003545 // Populate metadata result
3546 result.frame_number = requestIter->frame_number;
3547 result.num_output_buffers = 0;
3548 result.output_buffers = NULL;
3549 result.partial_result = requestIter->partial_result_cnt;
3550
3551 {
3552 Mutex::Autolock l(gHdrPlusClientLock);
3553 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3554 // Notify HDR+ client about the partial metadata.
3555 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3556 result.partial_result == PARTIAL_RESULT_COUNT);
3557 }
3558 }
3559
3560 orchestrateResult(&result);
3561 LOGD("urgent frame_number = %u", result.frame_number);
3562 free_camera_metadata((camera_metadata_t *)result.result);
3563}
3564
Thierry Strudel3d639192016-09-09 11:52:26 -07003565/*===========================================================================
3566 * FUNCTION : handleMetadataWithLock
3567 *
3568 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3569 *
3570 * PARAMETERS : @metadata_buf: metadata buffer
3571 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3572 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003573 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3574 * last urgent metadata in a batch. Always true for non-batch mode
3575 * @lastMetadataInBatch: Boolean to indicate whether this is the
3576 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003577 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3578 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003579 *
3580 * RETURN :
3581 *
3582 *==========================================================================*/
3583void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003584 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003585 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3586 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003587{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003588 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003589 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3590 //during flush do not send metadata from this thread
3591 LOGD("not sending metadata during flush or when mState is error");
3592 if (free_and_bufdone_meta_buf) {
3593 mMetadataChannel->bufDone(metadata_buf);
3594 free(metadata_buf);
3595 }
3596 return;
3597 }
3598
3599 //not in flush
3600 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3601 int32_t frame_number_valid, urgent_frame_number_valid;
3602 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003603 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003604 nsecs_t currentSysTime;
3605
3606 int32_t *p_frame_number_valid =
3607 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3608 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3609 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003610 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 int32_t *p_urgent_frame_number_valid =
3612 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3613 uint32_t *p_urgent_frame_number =
3614 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3615 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3616 metadata) {
3617 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3618 *p_frame_number_valid, *p_frame_number);
3619 }
3620
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003621 camera_metadata_t *resultMetadata = nullptr;
3622
Thierry Strudel3d639192016-09-09 11:52:26 -07003623 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3624 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3625 LOGE("Invalid metadata");
3626 if (free_and_bufdone_meta_buf) {
3627 mMetadataChannel->bufDone(metadata_buf);
3628 free(metadata_buf);
3629 }
3630 goto done_metadata;
3631 }
3632 frame_number_valid = *p_frame_number_valid;
3633 frame_number = *p_frame_number;
3634 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003635 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003636 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3637 urgent_frame_number = *p_urgent_frame_number;
3638 currentSysTime = systemTime(CLOCK_MONOTONIC);
3639
Jason Lee603176d2017-05-31 11:43:27 -07003640 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
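        // Estimate the CLOCK_BOOTTIME - CLOCK_MONOTONIC offset: bracket one BOOTTIME
        // read between two MONOTONIC reads and keep the attempt with the smallest
        // bracket (least scheduling jitter); capture_time is then rebased with that
        // offset below.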
3641 const int tries = 3;
3642 nsecs_t bestGap, measured;
3643 for (int i = 0; i < tries; ++i) {
3644 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3645 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3646 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3647 const nsecs_t gap = tmono2 - tmono;
3648 if (i == 0 || gap < bestGap) {
3649 bestGap = gap;
3650 measured = tbase - ((tmono + tmono2) >> 1);
3651 }
3652 }
3653 capture_time -= measured;
3654 }
3655
Thierry Strudel3d639192016-09-09 11:52:26 -07003656 // Detect if buffers from any requests are overdue
3657 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003658 int64_t timeout;
3659 {
3660 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3661 // If there is a pending HDR+ request, the following requests may be blocked until the
3662 // HDR+ request is done. So allow a longer timeout.
3663 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3664 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003665 if (timeout < mExpectedInflightDuration) {
3666 timeout = mExpectedInflightDuration;
3667 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 }
3669
3670 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003671 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003672 assert(missed.stream->priv);
3673 if (missed.stream->priv) {
3674 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3675 assert(ch->mStreams[0]);
3676 if (ch->mStreams[0]) {
3677 LOGE("Cancel missing frame = %d, buffer = %p,"
3678 "stream type = %d, stream format = %d",
3679 req.frame_number, missed.buffer,
3680 ch->mStreams[0]->getMyType(), missed.stream->format);
3681 ch->timeoutFrame(req.frame_number);
3682 }
3683 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003684 }
3685 }
3686 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003687 //For the very first metadata callback, regardless whether it contains valid
3688 //frame number, send the partial metadata for the jumpstarting requests.
3689 //Note that this has to be done even if the metadata doesn't contain valid
3690 //urgent frame number, because in the case only 1 request is ever submitted
3691 //to HAL, there won't be subsequent valid urgent frame number.
3692 if (mFirstMetadataCallback) {
3693 for (pendingRequestIterator i =
3694 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3695 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003696 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3697 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003698 }
3699 }
3700 mFirstMetadataCallback = false;
3701 }
3702
Thierry Strudel3d639192016-09-09 11:52:26 -07003703 //Partial result on process_capture_result for timestamp
3704 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003705 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003706
3707 //Received an urgent frame number, handle it
3708 //using partial results
3709 for (pendingRequestIterator i =
3710 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3711 LOGD("Iterator Frame = %d urgent frame = %d",
3712 i->frame_number, urgent_frame_number);
3713
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003714 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003715 (i->partial_result_cnt == 0)) {
3716 LOGE("Error: HAL missed urgent metadata for frame number %d",
3717 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003718 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003719 }
3720
3721 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003722 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003723 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3724 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003725 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3726 // Instant AEC settled for this frame.
3727 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3728 mInstantAECSettledFrameNumber = urgent_frame_number;
3729 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003730 break;
3731 }
3732 }
3733 }
3734
3735 if (!frame_number_valid) {
3736 LOGD("Not a valid normal frame number, used as SOF only");
3737 if (free_and_bufdone_meta_buf) {
3738 mMetadataChannel->bufDone(metadata_buf);
3739 free(metadata_buf);
3740 }
3741 goto done_metadata;
3742 }
3743 LOGH("valid frame_number = %u, capture_time = %lld",
3744 frame_number, capture_time);
3745
Emilian Peev4e0fe952017-06-30 12:40:09 -07003746 handleDepthDataLocked(metadata->depth_data, frame_number,
3747 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003748
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003749 // Check whether any stream buffer corresponding to this is dropped or not
3750 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3751 // OR check if instant AEC is enabled, then frames need to be dropped until AEC is settled.
3752 for (auto & pendingRequest : mPendingRequestsList) {
3753 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3754 mInstantAECSettledFrameNumber)) {
3755 camera3_notify_msg_t notify_msg = {};
3756 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003757 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003758 QCamera3ProcessingChannel *channel =
3759 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003760 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003761 if (p_cam_frame_drop) {
3762 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003763 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 // Got the stream ID for drop frame.
3765 dropFrame = true;
3766 break;
3767 }
3768 }
3769 } else {
3770 // This is instant AEC case.
3771 // For instant AEC drop the stream untill AEC is settled.
3772 dropFrame = true;
3773 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003774
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003775 if (dropFrame) {
3776 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3777 if (p_cam_frame_drop) {
3778 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003779 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003780 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003781 } else {
3782 // For instant AEC, inform frame drop and frame number
3783 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3784 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003785 pendingRequest.frame_number, streamID,
3786 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003787 }
3788 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003789 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003790 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003792 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003793 if (p_cam_frame_drop) {
3794 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003795 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003797 } else {
3798 // For instant AEC, inform frame drop and frame number
3799 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3800 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003801 pendingRequest.frame_number, streamID,
3802 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003803 }
3804 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003805 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003806 PendingFrameDrop.stream_ID = streamID;
3807 // Add the Frame drop info to mPendingFrameDropList
3808 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003809 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003810 }
3811 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003813
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814 for (auto & pendingRequest : mPendingRequestsList) {
3815 // Find the pending request with the frame number.
3816 if (pendingRequest.frame_number == frame_number) {
3817 // Update the sensor timestamp.
3818 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003819
Thierry Strudel3d639192016-09-09 11:52:26 -07003820
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003821 /* Set the timestamp in display metadata so that clients aware of
3822 private_handle such as VT can use this un-modified timestamps.
3823 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003824 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003825
Thierry Strudel3d639192016-09-09 11:52:26 -07003826 // Find channel requiring metadata, meaning internal offline postprocess
3827 // is needed.
3828 //TODO: for now, we don't support two streams requiring metadata at the same time.
3829 // (because we are not making copies, and the metadata buffer is not reference counted.)
3830 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003831 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3832 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 if (iter->need_metadata) {
3834 internalPproc = true;
3835 QCamera3ProcessingChannel *channel =
3836 (QCamera3ProcessingChannel *)iter->stream->priv;
3837 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003838 if(p_is_metabuf_queued != NULL) {
3839 *p_is_metabuf_queued = true;
3840 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003841 break;
3842 }
3843 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003844 for (auto itr = pendingRequest.internalRequestList.begin();
3845 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003846 if (itr->need_metadata) {
3847 internalPproc = true;
3848 QCamera3ProcessingChannel *channel =
3849 (QCamera3ProcessingChannel *)itr->stream->priv;
3850 channel->queueReprocMetadata(metadata_buf);
3851 break;
3852 }
3853 }
3854
Thierry Strudel54dc9782017-02-15 12:12:10 -08003855 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003856
3857 bool *enableZsl = nullptr;
3858 if (gExposeEnableZslKey) {
3859 enableZsl = &pendingRequest.enableZsl;
3860 }
3861
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003862 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003863 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003864 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003865
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003866 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003867
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003868 if (pendingRequest.blob_request) {
3869 //Dump tuning metadata if enabled and available
3870 char prop[PROPERTY_VALUE_MAX];
3871 memset(prop, 0, sizeof(prop));
3872 property_get("persist.camera.dumpmetadata", prop, "0");
3873 int32_t enabled = atoi(prop);
3874 if (enabled && metadata->is_tuning_params_valid) {
3875 dumpMetadataToFile(metadata->tuning_params,
3876 mMetaFrameCount,
3877 enabled,
3878 "Snapshot",
3879 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003880 }
3881 }
3882
3883 if (!internalPproc) {
3884 LOGD("couldn't find need_metadata for this metadata");
3885 // Return metadata buffer
3886 if (free_and_bufdone_meta_buf) {
3887 mMetadataChannel->bufDone(metadata_buf);
3888 free(metadata_buf);
3889 }
3890 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003891
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003892 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003893 }
3894 }
3895
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003896 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3897
3898 // Try to send out capture result metadata.
3899 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003900 return;
3901
Thierry Strudel3d639192016-09-09 11:52:26 -07003902done_metadata:
3903 for (pendingRequestIterator i = mPendingRequestsList.begin();
3904 i != mPendingRequestsList.end() ;i++) {
3905 i->pipeline_depth++;
3906 }
3907 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3908 unblockRequestIfNecessary();
3909}
3910
3911/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003912 * FUNCTION : handleDepthDataLocked
3913 *
3914 * DESCRIPTION: Handles incoming depth data
3915 *
3916 * PARAMETERS : @depthData : Depth data
3917 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003918 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003919 *
3920 * RETURN :
3921 *
3922 *==========================================================================*/
3923void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003924 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003925 uint32_t currentFrameNumber;
3926 buffer_handle_t *depthBuffer;
3927
3928 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003929 return;
3930 }
3931
3932 camera3_stream_buffer_t resultBuffer =
3933 {.acquire_fence = -1,
3934 .release_fence = -1,
3935 .status = CAMERA3_BUFFER_STATUS_OK,
3936 .buffer = nullptr,
3937 .stream = mDepthChannel->getStream()};
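// Drain note for the loop below: depth buffers are returned in frame-number order.
// Buffers older than 'frameNumber' never received depth data, so they get a
// CAMERA3_MSG_ERROR_BUFFER notify and CAMERA3_BUFFER_STATUS_ERROR; the buffer that
// matches 'frameNumber' is populated from 'depthData' when 'valid' is set; anything
// newer than 'frameNumber' stays queued for a later callback.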
Emilian Peev7650c122017-01-19 08:24:33 -08003938 do {
3939 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3940 if (nullptr == depthBuffer) {
3941 break;
3942 }
3943
Emilian Peev7650c122017-01-19 08:24:33 -08003944 resultBuffer.buffer = depthBuffer;
3945 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003946 if (valid) {
3947 int32_t rc = mDepthChannel->populateDepthData(depthData,
3948 frameNumber);
3949 if (NO_ERROR != rc) {
3950 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3951 } else {
3952 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3953 }
Emilian Peev7650c122017-01-19 08:24:33 -08003954 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003955 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003956 }
3957 } else if (currentFrameNumber > frameNumber) {
3958 break;
3959 } else {
3960 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3961 {{currentFrameNumber, mDepthChannel->getStream(),
3962 CAMERA3_MSG_ERROR_BUFFER}}};
3963 orchestrateNotify(&notify_msg);
3964
3965 LOGE("Depth buffer for frame number: %d is missing "
3966 "returning back!", currentFrameNumber);
3967 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3968 }
3969 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003970 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003971 } while (currentFrameNumber < frameNumber);
3972}
3973
3974/*===========================================================================
3975 * FUNCTION : notifyErrorFoPendingDepthData
3976 *
3977 * DESCRIPTION: Returns error for any pending depth buffers
3978 *
3979 * PARAMETERS : depthCh - depth channel that needs to get flushed
3980 *
3981 * RETURN :
3982 *
3983 *==========================================================================*/
3984void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3985 QCamera3DepthChannel *depthCh) {
3986 uint32_t currentFrameNumber;
3987 buffer_handle_t *depthBuffer;
3988
3989 if (nullptr == depthCh) {
3990 return;
3991 }
3992
3993 camera3_notify_msg_t notify_msg =
3994 {.type = CAMERA3_MSG_ERROR,
3995 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3996 camera3_stream_buffer_t resultBuffer =
3997 {.acquire_fence = -1,
3998 .release_fence = -1,
3999 .buffer = nullptr,
4000 .stream = depthCh->getStream(),
4001 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004002
4003 while (nullptr !=
4004 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4005 depthCh->unmapBuffer(currentFrameNumber);
4006
4007 notify_msg.message.error.frame_number = currentFrameNumber;
4008 orchestrateNotify(&notify_msg);
4009
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004010 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004011 };
4012}
4013
4014/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004015 * FUNCTION : hdrPlusPerfLock
4016 *
4017 * DESCRIPTION: perf lock for HDR+ using custom intent
4018 *
4019 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4020 *
4021 * RETURN : None
4022 *
4023 *==========================================================================*/
4024void QCamera3HardwareInterface::hdrPlusPerfLock(
4025 mm_camera_super_buf_t *metadata_buf)
4026{
4027 if (NULL == metadata_buf) {
4028 LOGE("metadata_buf is NULL");
4029 return;
4030 }
4031 metadata_buffer_t *metadata =
4032 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4033 int32_t *p_frame_number_valid =
4034 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4035 uint32_t *p_frame_number =
4036 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4037
4038 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4039 LOGE("%s: Invalid metadata", __func__);
4040 return;
4041 }
4042
Wei Wang01385482017-08-03 10:49:34 -07004043 //acquire perf lock for 2 secs after the last HDR frame is captured
4044 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
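// The lock is taken only when this metadata's frame number matches
// mLastCustIntentFrmNum (the frame associated with the HDR+ custom capture intent),
// so the boost covers the HDR_PLUS_PERF_TIME_OUT window following that capture.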
Thierry Strudel3d639192016-09-09 11:52:26 -07004045 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4046 if ((p_frame_number != NULL) &&
4047 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004048 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004049 }
4050 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004051}
4052
4053/*===========================================================================
4054 * FUNCTION : handleInputBufferWithLock
4055 *
4056 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4057 *
4058 * PARAMETERS : @frame_number: frame number of the input buffer
4059 *
4060 * RETURN :
4061 *
4062 *==========================================================================*/
4063void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4064{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004065 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004066 pendingRequestIterator i = mPendingRequestsList.begin();
4067 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4068 i++;
4069 }
4070 if (i != mPendingRequestsList.end() && i->input_buffer) {
4071 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004072 CameraMetadata settings;
4073 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
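// For a reprocess request the shutter timestamp is taken from
// ANDROID_SENSOR_TIMESTAMP in the request settings (typically the original
// capture's timestamp); the current time above is only a fallback when the
// settings do not carry one.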
4074 if(i->settings) {
4075 settings = i->settings;
4076 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4077 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004078 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004079 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004080 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004081 } else {
4082 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 }
4084
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004085 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4086 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4087 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004088
4089 camera3_capture_result result;
4090 memset(&result, 0, sizeof(camera3_capture_result));
4091 result.frame_number = frame_number;
4092 result.result = i->settings;
4093 result.input_buffer = i->input_buffer;
4094 result.partial_result = PARTIAL_RESULT_COUNT;
4095
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004096 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004097 LOGD("Input request metadata and input buffer frame_number = %u",
4098 i->frame_number);
4099 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004100
4101 // Dispatch result metadata that may be just unblocked by this reprocess result.
4102 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004103 } else {
4104 LOGE("Could not find input request for frame number %d", frame_number);
4105 }
4106}
4107
4108/*===========================================================================
4109 * FUNCTION : handleBufferWithLock
4110 *
4111 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4112 *
4113 * PARAMETERS : @buffer: image buffer for the callback
4114 * @frame_number: frame number of the image buffer
4115 *
4116 * RETURN :
4117 *
4118 *==========================================================================*/
4119void QCamera3HardwareInterface::handleBufferWithLock(
4120 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4121{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004122 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004123
4124 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4125 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4126 }
4127
Thierry Strudel3d639192016-09-09 11:52:26 -07004128 /* Nothing to be done during error state */
4129 if ((ERROR == mState) || (DEINIT == mState)) {
4130 return;
4131 }
4132 if (mFlushPerf) {
4133 handleBuffersDuringFlushLock(buffer);
4134 return;
4135 }
4136 //not in flush
4137 // If the frame number doesn't exist in the pending request list,
4138 // directly send the buffer to the frameworks, and update pending buffers map
4139 // Otherwise, book-keep the buffer.
4140 pendingRequestIterator i = mPendingRequestsList.begin();
4141 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4142 i++;
4143 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004144
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004145 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004146 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004147 // For a reprocessing request, try to send out result metadata.
4148 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004149 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004150 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004151
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004152 // Check if this frame was dropped.
4153 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4154 m != mPendingFrameDropList.end(); m++) {
4155 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4156 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4157 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4158 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4159 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4160 frame_number, streamID);
4161 m = mPendingFrameDropList.erase(m);
4162 break;
4163 }
4164 }
4165
4166 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4167 LOGH("result frame_number = %d, buffer = %p",
4168 frame_number, buffer->buffer);
4169
4170 mPendingBuffersMap.removeBuf(buffer->buffer);
4171 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4172
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004173 if (mPreviewStarted == false) {
4174 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4175 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004176 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4177
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004178 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4179 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4180 mPreviewStarted = true;
4181
4182 // Set power hint for preview
4183 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4184 }
4185 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004186}
4187
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004188void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004189 const camera_metadata_t *resultMetadata)
4190{
4191 // Find the pending request for this result metadata.
4192 auto requestIter = mPendingRequestsList.begin();
4193 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4194 requestIter++;
4195 }
4196
4197 if (requestIter == mPendingRequestsList.end()) {
4198 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4199 return;
4200 }
4201
4202 // Update the result metadata
4203 requestIter->resultMetadata = resultMetadata;
4204
4205 // Check what type of request this is.
4206 bool liveRequest = false;
4207 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004208 // HDR+ request doesn't have partial results.
4209 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004210 } else if (requestIter->input_buffer != nullptr) {
4211 // Reprocessing request result is the same as settings.
4212 requestIter->resultMetadata = requestIter->settings;
4213 // Reprocessing request doesn't have partial results.
4214 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4215 } else {
4216 liveRequest = true;
4217 requestIter->partial_result_cnt++;
4218 mPendingLiveRequest--;
4219
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004220 {
4221 Mutex::Autolock l(gHdrPlusClientLock);
4222 // For a live request, send the metadata to HDR+ client.
4223 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4224 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4225 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4226 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004227 }
4228 }
4229
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004230 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4231}
4232
4233void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4234 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004235 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4236 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004237 bool readyToSend = true;
4238
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004239 // Iterate through the pending requests to send out result metadata that are ready. Also if
4240 // this result metadata belongs to a live request, notify errors for previous live requests
4241 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004242 auto iter = mPendingRequestsList.begin();
4243 while (iter != mPendingRequestsList.end()) {
4244 // Check if current pending request is ready. If it's not ready, the following pending
4245 // requests are also not ready.
4246 if (readyToSend && iter->resultMetadata == nullptr) {
4247 readyToSend = false;
4248 }
4249
4250 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4251
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004252 camera3_capture_result_t result = {};
4253 result.frame_number = iter->frame_number;
4254 result.result = iter->resultMetadata;
4255 result.partial_result = iter->partial_result_cnt;
4256
4257 // If this pending buffer has result metadata, we may be able to send out shutter callback
4258 // and result metadata.
4259 if (iter->resultMetadata != nullptr) {
4260 if (!readyToSend) {
4261 // If any of the previous pending request is not ready, this pending request is
4262 // also not ready to send in order to keep shutter callbacks and result metadata
4263 // in order.
4264 iter++;
4265 continue;
4266 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004267 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 // If the result metadata belongs to a live request, notify errors for previous pending
4269 // live requests.
4270 mPendingLiveRequest--;
4271
4272 CameraMetadata dummyMetadata;
4273 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4274 result.result = dummyMetadata.release();
4275
4276 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004277
4278 // partial_result should be PARTIAL_RESULT_CNT in case of
4279 // ERROR_RESULT.
4280 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4281 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004282 } else {
4283 iter++;
4284 continue;
4285 }
4286
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004287 result.output_buffers = nullptr;
4288 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004289 orchestrateResult(&result);
4290
4291 // For reprocessing, result metadata is the same as settings so do not free it here to
4292 // avoid double free.
4293 if (result.result != iter->settings) {
4294 free_camera_metadata((camera_metadata_t *)result.result);
4295 }
4296 iter->resultMetadata = nullptr;
4297 iter = erasePendingRequest(iter);
4298 }
4299
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004300 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004301 for (auto &iter : mPendingRequestsList) {
4302 // Increment pipeline depth for the following pending requests.
4303 if (iter.frame_number > frameNumber) {
4304 iter.pipeline_depth++;
4305 }
4306 }
4307 }
4308
4309 unblockRequestIfNecessary();
4310}
4311
Thierry Strudel3d639192016-09-09 11:52:26 -07004312/*===========================================================================
4313 * FUNCTION : unblockRequestIfNecessary
4314 *
4315 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4316 * that mMutex is held when this function is called.
4317 *
4318 * PARAMETERS :
4319 *
4320 * RETURN :
4321 *
4322 *==========================================================================*/
4323void QCamera3HardwareInterface::unblockRequestIfNecessary()
4324{
4325 // Unblock process_capture_request
4326 pthread_cond_signal(&mRequestCond);
4327}
4328
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004329/*===========================================================================
4330 * FUNCTION : isHdrSnapshotRequest
4331 *
4332 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4333 *
4334 * PARAMETERS : camera3 request structure
4335 *
4336 * RETURN : boolean decision variable
4337 *
4338 *==========================================================================*/
4339bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4340{
4341 if (request == NULL) {
4342 LOGE("Invalid request handle");
4343 assert(0);
4344 return false;
4345 }
4346
4347 if (!mForceHdrSnapshot) {
4348 CameraMetadata frame_settings;
4349 frame_settings = request->settings;
4350
4351 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4352 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4353 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4354 return false;
4355 }
4356 } else {
4357 return false;
4358 }
4359
4360 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4361 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4362 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4363 return false;
4364 }
4365 } else {
4366 return false;
4367 }
4368 }
4369
4370 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4371 if (request->output_buffers[i].stream->format
4372 == HAL_PIXEL_FORMAT_BLOB) {
4373 return true;
4374 }
4375 }
4376
4377 return false;
4378}
4379/*===========================================================================
4380 * FUNCTION : orchestrateRequest
4381 *
4382 * DESCRIPTION: Orchestrates a capture request from camera service
4383 *
4384 * PARAMETERS :
4385 * @request : request from framework to process
4386 *
4387 * RETURN : Error status codes
4388 *
4389 *==========================================================================*/
4390int32_t QCamera3HardwareInterface::orchestrateRequest(
4391 camera3_capture_request_t *request)
4392{
4393
4394 uint32_t originalFrameNumber = request->frame_number;
4395 uint32_t originalOutputCount = request->num_output_buffers;
4396 const camera_metadata_t *original_settings = request->settings;
4397 List<InternalRequest> internallyRequestedStreams;
4398 List<InternalRequest> emptyInternalList;
4399
4400 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4401 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
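/* The HDR snapshot path below expands the single framework request into a fixed
 * sequence of internal requests with AE locked:
 *   1. a metering-only settling request at GB_HDR_HALF_STEP_EV,
 *   2. the framework-visible capture (mapped back to originalFrameNumber),
 *   3. a metering-only settling request followed by an internal blob capture at 0 EV,
 *   4. a metering-only settling request followed by an internal blob capture at
 *      GB_HDR_2X_STEP_EV.
 * The internally generated frame numbers map to EMPTY_FRAMEWORK_FRAME_NUMBER, so
 * their results and notifies are dropped in orchestrateResult()/orchestrateNotify(). */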
4402 uint32_t internalFrameNumber;
4403 CameraMetadata modified_meta;
4404
4405
4406 /* Add Blob channel to list of internally requested streams */
4407 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4408 if (request->output_buffers[i].stream->format
4409 == HAL_PIXEL_FORMAT_BLOB) {
4410 InternalRequest streamRequested;
4411 streamRequested.meteringOnly = 1;
4412 streamRequested.need_metadata = 0;
4413 streamRequested.stream = request->output_buffers[i].stream;
4414 internallyRequestedStreams.push_back(streamRequested);
4415 }
4416 }
4417 request->num_output_buffers = 0;
4418 auto itr = internallyRequestedStreams.begin();
4419
4420 /* Modify setting to set compensation */
4421 modified_meta = request->settings;
4422 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4423 uint8_t aeLock = 1;
4424 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4425 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4426 camera_metadata_t *modified_settings = modified_meta.release();
4427 request->settings = modified_settings;
4428
4429 /* Capture Settling & -2x frame */
4430 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4431 request->frame_number = internalFrameNumber;
4432 processCaptureRequest(request, internallyRequestedStreams);
4433
4434 request->num_output_buffers = originalOutputCount;
4435 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4436 request->frame_number = internalFrameNumber;
4437 processCaptureRequest(request, emptyInternalList);
4438 request->num_output_buffers = 0;
4439
4440 modified_meta = modified_settings;
4441 expCompensation = 0;
4442 aeLock = 1;
4443 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4444 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4445 modified_settings = modified_meta.release();
4446 request->settings = modified_settings;
4447
4448 /* Capture Settling & 0X frame */
4449
4450 itr = internallyRequestedStreams.begin();
4451 if (itr == internallyRequestedStreams.end()) {
4452 LOGE("Error Internally Requested Stream list is empty");
4453 assert(0);
4454 } else {
4455 itr->need_metadata = 0;
4456 itr->meteringOnly = 1;
4457 }
4458
4459 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4460 request->frame_number = internalFrameNumber;
4461 processCaptureRequest(request, internallyRequestedStreams);
4462
4463 itr = internallyRequestedStreams.begin();
4464 if (itr == internallyRequestedStreams.end()) {
4465 ALOGE("Error Internally Requested Stream list is empty");
4466 assert(0);
4467 } else {
4468 itr->need_metadata = 1;
4469 itr->meteringOnly = 0;
4470 }
4471
4472 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4473 request->frame_number = internalFrameNumber;
4474 processCaptureRequest(request, internallyRequestedStreams);
4475
4476 /* Capture 2X frame*/
4477 modified_meta = modified_settings;
4478 expCompensation = GB_HDR_2X_STEP_EV;
4479 aeLock = 1;
4480 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4481 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4482 modified_settings = modified_meta.release();
4483 request->settings = modified_settings;
4484
4485 itr = internallyRequestedStreams.begin();
4486 if (itr == internallyRequestedStreams.end()) {
4487 ALOGE("Error Internally Requested Stream list is empty");
4488 assert(0);
4489 } else {
4490 itr->need_metadata = 0;
4491 itr->meteringOnly = 1;
4492 }
4493 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4494 request->frame_number = internalFrameNumber;
4495 processCaptureRequest(request, internallyRequestedStreams);
4496
4497 itr = internallyRequestedStreams.begin();
4498 if (itr == internallyRequestedStreams.end()) {
4499 ALOGE("Error Internally Requested Stream list is empty");
4500 assert(0);
4501 } else {
4502 itr->need_metadata = 1;
4503 itr->meteringOnly = 0;
4504 }
4505
4506 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4507 request->frame_number = internalFrameNumber;
4508 processCaptureRequest(request, internallyRequestedStreams);
4509
4510
4511 /* Capture 2X on original streaming config*/
4512 internallyRequestedStreams.clear();
4513
4514 /* Restore original settings pointer */
4515 request->settings = original_settings;
4516 } else {
4517 uint32_t internalFrameNumber;
4518 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4519 request->frame_number = internalFrameNumber;
4520 return processCaptureRequest(request, internallyRequestedStreams);
4521 }
4522
4523 return NO_ERROR;
4524}
4525
4526/*===========================================================================
4527 * FUNCTION : orchestrateResult
4528 *
4529 * DESCRIPTION: Orchestrates a capture result to camera service
4530 *
4531 * PARAMETERS :
4532 * @result : capture result to be sent to camera service
4533 *
4534 * RETURN :
4535 *
4536 *==========================================================================*/
4537void QCamera3HardwareInterface::orchestrateResult(
4538 camera3_capture_result_t *result)
4539{
4540 uint32_t frameworkFrameNumber;
4541 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4542 frameworkFrameNumber);
4543 if (rc != NO_ERROR) {
4544 LOGE("Cannot find translated frameworkFrameNumber");
4545 assert(0);
4546 } else {
4547 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004548 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004549 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004550 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004551 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4552 camera_metadata_entry_t entry;
4553 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4554 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004555 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004556 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4557 if (ret != OK)
4558 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004559 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004560 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004561 result->frame_number = frameworkFrameNumber;
4562 mCallbackOps->process_capture_result(mCallbackOps, result);
4563 }
4564 }
4565}
4566
4567/*===========================================================================
4568 * FUNCTION : orchestrateNotify
4569 *
4570 * DESCRIPTION: Orchestrates a notify to camera service
4571 *
4572 * PARAMETERS :
4573 * @notify_msg : notify message to be sent to camera service
4574 *
4575 * RETURN :
4576 *
4577 *==========================================================================*/
4578void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4579{
4580 uint32_t frameworkFrameNumber;
4581 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004582 int32_t rc = NO_ERROR;
4583
4584 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004585 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004586
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004587 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004588 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4589 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4590 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004591 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004592 LOGE("Cannot find translated frameworkFrameNumber");
4593 assert(0);
4594 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004595 }
4596 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004597
4598 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4599 LOGD("Internal Request drop the notifyCb");
4600 } else {
4601 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4602 mCallbackOps->notify(mCallbackOps, notify_msg);
4603 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004604}
4605
4606/*===========================================================================
4607 * FUNCTION : FrameNumberRegistry
4608 *
4609 * DESCRIPTION: Constructor
4610 *
4611 * PARAMETERS :
4612 *
4613 * RETURN :
4614 *
4615 *==========================================================================*/
4616FrameNumberRegistry::FrameNumberRegistry()
4617{
4618 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4619}
4620
4621/*===========================================================================
4622 * FUNCTION : ~FrameNumberRegistry
4623 *
4624 * DESCRIPTION: Destructor
4625 *
4626 * PARAMETERS :
4627 *
4628 * RETURN :
4629 *
4630 *==========================================================================*/
4631FrameNumberRegistry::~FrameNumberRegistry()
4632{
4633}
4634
4635/*===========================================================================
4636 * FUNCTION : PurgeOldEntriesLocked
4637 *
4638 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
4639 *
4640 * PARAMETERS :
4641 *
4642 * RETURN : NONE
4643 *
4644 *==========================================================================*/
4645void FrameNumberRegistry::purgeOldEntriesLocked()
4646{
4647 while (_register.begin() != _register.end()) {
4648 auto itr = _register.begin();
4649 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4650 _register.erase(itr);
4651 } else {
4652 return;
4653 }
4654 }
4655}
4656
4657/*===========================================================================
4658 * FUNCTION : allocStoreInternalFrameNumber
4659 *
4660 * DESCRIPTION: Method to note down a framework request and associate a new
4661 * internal request number against it
4662 *
4663 * PARAMETERS :
4664 * @fFrameNumber: Identifier given by framework
4665 * @internalFN : Output parameter which will have the newly generated internal
4666 * entry
4667 *
4668 * RETURN : Error code
4669 *
4670 *==========================================================================*/
4671int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4672 uint32_t &internalFrameNumber)
4673{
4674 Mutex::Autolock lock(mRegistryLock);
4675 internalFrameNumber = _nextFreeInternalNumber++;
4676 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4677 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4678 purgeOldEntriesLocked();
4679 return NO_ERROR;
4680}
4681
4682/*===========================================================================
4683 * FUNCTION : generateStoreInternalFrameNumber
4684 *
4685 * DESCRIPTION: Method to associate a new internal request number independent
4686 * of any association with framework requests
4687 *
4688 * PARAMETERS :
4689 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4690 *
4691 *
4692 * RETURN : Error code
4693 *
4694 *==========================================================================*/
4695int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4696{
4697 Mutex::Autolock lock(mRegistryLock);
4698 internalFrameNumber = _nextFreeInternalNumber++;
4699 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4700 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4701 purgeOldEntriesLocked();
4702 return NO_ERROR;
4703}
4704
4705/*===========================================================================
4706 * FUNCTION : getFrameworkFrameNumber
4707 *
4708 * DESCRIPTION: Method to query the framework frame number given an internal #
4709 *
4710 * PARAMETERS :
4711 * @internalFrame#: Internal reference
4712 * @frameworkframenumber: Output parameter holding framework frame entry
4713 *
4714 * RETURN : Error code
4715 *
4716 *==========================================================================*/
4717int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4718 uint32_t &frameworkFrameNumber)
4719{
4720 Mutex::Autolock lock(mRegistryLock);
4721 auto itr = _register.find(internalFrameNumber);
4722 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004723 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004724 return -ENOENT;
4725 }
4726
4727 frameworkFrameNumber = itr->second;
4728 purgeOldEntriesLocked();
4729 return NO_ERROR;
4730}
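/* Illustrative FrameNumberRegistry usage, mirroring the callers above:
 *
 *   uint32_t internalFrame;
 *   _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrame);
 *   // ... submit internalFrame to the backend ...
 *   uint32_t frameworkFrame;
 *   if ((_orchestrationDb.getFrameworkFrameNumber(internalFrame, frameworkFrame) == NO_ERROR)
 *           && (frameworkFrame != EMPTY_FRAMEWORK_FRAME_NUMBER)) {
 *       // report the result/notify to the framework using frameworkFrame
 *   }
 *
 * HAL-internal requests use generateStoreInternalFrameNumber() instead; their
 * entries map to EMPTY_FRAMEWORK_FRAME_NUMBER and are dropped by the callers. */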
Thierry Strudel3d639192016-09-09 11:52:26 -07004731
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004732status_t QCamera3HardwareInterface::fillPbStreamConfig(
4733 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4734 QCamera3Channel *channel, uint32_t streamIndex) {
4735 if (config == nullptr) {
4736 LOGE("%s: config is null", __FUNCTION__);
4737 return BAD_VALUE;
4738 }
4739
4740 if (channel == nullptr) {
4741 LOGE("%s: channel is null", __FUNCTION__);
4742 return BAD_VALUE;
4743 }
4744
4745 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4746 if (stream == nullptr) {
4747 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4748 return NAME_NOT_FOUND;
4749 }
4750
4751 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4752 if (streamInfo == nullptr) {
4753 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4754 return NAME_NOT_FOUND;
4755 }
4756
4757 config->id = pbStreamId;
4758 config->image.width = streamInfo->dim.width;
4759 config->image.height = streamInfo->dim.height;
4760 config->image.padding = 0;
4761 config->image.format = pbStreamFormat;
4762
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004763 uint32_t totalPlaneSize = 0;
4764
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004765 // Fill plane information.
4766 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4767 pbcamera::PlaneConfiguration plane;
4768 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4769 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4770 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004771
4772 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004773 }
4774
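// The backend frame length can exceed the sum of the per-plane sizes (for example
// due to buffer alignment), so the remainder is reported as padding to describe the
// full buffer footprint to the HDR+ service.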
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004775 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004776 return OK;
4777}
4778
Thierry Strudel3d639192016-09-09 11:52:26 -07004779/*===========================================================================
4780 * FUNCTION : processCaptureRequest
4781 *
4782 * DESCRIPTION: process a capture request from camera service
4783 *
4784 * PARAMETERS :
4785 * @request : request from framework to process
4786 *
4787 * RETURN :
4788 *
4789 *==========================================================================*/
4790int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004791 camera3_capture_request_t *request,
4792 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004793{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004794 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 int rc = NO_ERROR;
4796 int32_t request_id;
4797 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 bool isVidBufRequested = false;
4799 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004800 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004801
4802 pthread_mutex_lock(&mMutex);
4803
4804 // Validate current state
4805 switch (mState) {
4806 case CONFIGURED:
4807 case STARTED:
4808 /* valid state */
4809 break;
4810
4811 case ERROR:
4812 pthread_mutex_unlock(&mMutex);
4813 handleCameraDeviceError();
4814 return -ENODEV;
4815
4816 default:
4817 LOGE("Invalid state %d", mState);
4818 pthread_mutex_unlock(&mMutex);
4819 return -ENODEV;
4820 }
4821
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004822 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004823 if (rc != NO_ERROR) {
4824 LOGE("incoming request is not valid");
4825 pthread_mutex_unlock(&mMutex);
4826 return rc;
4827 }
4828
4829 meta = request->settings;
4830
4831 // For first capture request, send capture intent, and
4832 // stream on all streams
4833 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004834 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004835 // send an unconfigure to the backend so that the isp
4836 // resources are deallocated
4837 if (!mFirstConfiguration) {
4838 cam_stream_size_info_t stream_config_info;
4839 int32_t hal_version = CAM_HAL_V3;
4840 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4841 stream_config_info.buffer_info.min_buffers =
4842 MIN_INFLIGHT_REQUESTS;
4843 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004844 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004845 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 clear_metadata_buffer(mParameters);
4847 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4848 CAM_INTF_PARM_HAL_VERSION, hal_version);
4849 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4850 CAM_INTF_META_STREAM_INFO, stream_config_info);
4851 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4852 mParameters);
4853 if (rc < 0) {
4854 LOGE("set_parms for unconfigure failed");
4855 pthread_mutex_unlock(&mMutex);
4856 return rc;
4857 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004858
Thierry Strudel3d639192016-09-09 11:52:26 -07004859 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004860 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004861 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004862 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004864 property_get("persist.camera.is_type", is_type_value, "4");
4865 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4866 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4867 property_get("persist.camera.is_type_preview", is_type_value, "4");
4868 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4869 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004870
4871 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4872 int32_t hal_version = CAM_HAL_V3;
4873 uint8_t captureIntent =
4874 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4875 mCaptureIntent = captureIntent;
4876 clear_metadata_buffer(mParameters);
4877 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4878 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4879 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004880 if (mFirstConfiguration) {
4881 // configure instant AEC
4882 // Instant AEC is a session based parameter and it is needed only
4883 // once per complete session after open camera.
4884 // i.e. This is set only once for the first capture request, after open camera.
4885 setInstantAEC(meta);
4886 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004887 uint8_t fwkVideoStabMode=0;
4888 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4889 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4890 }
4891
Xue Tuecac74e2017-04-17 13:58:15 -07004892 // If EIS setprop is enabled then only turn it on for video/preview
4893 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004894 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 int32_t vsMode;
4896 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4897 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4898 rc = BAD_VALUE;
4899 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004900 LOGD("setEis %d", setEis);
4901 bool eis3Supported = false;
4902 size_t count = IS_TYPE_MAX;
4903 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4904 for (size_t i = 0; i < count; i++) {
4905 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4906 eis3Supported = true;
4907 break;
4908 }
4909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004910
4911 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912 //it could either be 4 or 5 depending on the stream and video size
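// Specifically, when setEis is true: preview streams take isTypePreview, video
// streams take isTypeVideo but fall back to IS_TYPE_EIS_2_0 when EIS 3.0 is
// requested yet missing from supported_is_types, and all other streams get
// IS_TYPE_NONE. When setEis is false, every stream gets IS_TYPE_NONE.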
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4914 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4916 is_type = isTypePreview;
4917 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4918 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4919 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004921 } else {
4922 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004923 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004924 } else {
4925 is_type = IS_TYPE_NONE;
4926 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004928 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4930 }
4931 }
4932
4933 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4934 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4935
Thierry Strudel54dc9782017-02-15 12:12:10 -08004936 //Disable tintless only if the property is set to 0
4937 memset(prop, 0, sizeof(prop));
4938 property_get("persist.camera.tintless.enable", prop, "1");
4939 int32_t tintless_value = atoi(prop);
4940
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4942 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004943
Thierry Strudel3d639192016-09-09 11:52:26 -07004944 //Disable CDS for HFR mode or if DIS/EIS is on.
4945 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4946 //after every configure_stream
4947 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4948 (m_bIsVideo)) {
4949 int32_t cds = CAM_CDS_MODE_OFF;
4950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4951 CAM_INTF_PARM_CDS_MODE, cds))
4952 LOGE("Failed to disable CDS for HFR mode");
4953
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955
4956 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4957 uint8_t* use_av_timer = NULL;
4958
4959 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004960 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004961 use_av_timer = &m_debug_avtimer;
4962 }
4963 else{
4964 use_av_timer =
4965 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004966 if (use_av_timer) {
4967 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4968 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004969 }
4970
4971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4972 rc = BAD_VALUE;
4973 }
4974 }
4975
Thierry Strudel3d639192016-09-09 11:52:26 -07004976 setMobicat();
4977
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004978 uint8_t nrMode = 0;
4979 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4980 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4981 }
4982
Thierry Strudel3d639192016-09-09 11:52:26 -07004983 /* Set fps and hfr mode while sending meta stream info so that sensor
4984 * can configure appropriate streaming mode */
4985 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004986 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4987 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4989 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004990 if (rc == NO_ERROR) {
4991 int32_t max_fps =
4992 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004993 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004994 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4995 }
4996 /* For HFR, more buffers are dequeued upfront to improve the performance */
4997 if (mBatchSize) {
4998 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4999 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5000 }
5001 }
5002 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005003 LOGE("setHalFpsRange failed");
5004 }
5005 }
5006 if (meta.exists(ANDROID_CONTROL_MODE)) {
5007 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5008 rc = extractSceneMode(meta, metaMode, mParameters);
5009 if (rc != NO_ERROR) {
5010 LOGE("extractSceneMode failed");
5011 }
5012 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005013 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005014
Thierry Strudel04e026f2016-10-10 11:27:36 -07005015 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5016 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5017 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5018 rc = setVideoHdrMode(mParameters, vhdr);
5019 if (rc != NO_ERROR) {
5020 LOGE("setVideoHDR is failed");
5021 }
5022 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005023
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005024 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005025 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005026 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005027 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5028 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5029 sensorModeFullFov)) {
5030 rc = BAD_VALUE;
5031 }
5032 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005033 //TODO: validate the arguments, HSV scenemode should have only the
5034 //advertised fps ranges
5035
5036 /*set the capture intent, hal version, tintless, stream info,
5037 *and DIS enable parameters to the backend*/
5038 LOGD("set_parms META_STREAM_INFO " );
5039 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005040 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5041 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005042 mStreamConfigInfo.type[i],
5043 mStreamConfigInfo.stream_sizes[i].width,
5044 mStreamConfigInfo.stream_sizes[i].height,
5045 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005046 mStreamConfigInfo.format[i],
5047 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005048 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005049
Thierry Strudel3d639192016-09-09 11:52:26 -07005050 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5051 mParameters);
5052 if (rc < 0) {
5053 LOGE("set_parms failed for hal version, stream info");
5054 }
5055
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005056 cam_sensor_mode_info_t sensorModeInfo = {};
5057 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 if (rc != NO_ERROR) {
5059 LOGE("Failed to get sensor output size");
5060 pthread_mutex_unlock(&mMutex);
5061 goto error_exit;
5062 }
5063
5064 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5065 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005066 sensorModeInfo.active_array_size.width,
5067 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005068
5069 /* Set batchmode before initializing channel. Since registerBuffer
5070 * internally initializes some of the channels, better set batchmode
5071 * even before first register buffer */
5072 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5073 it != mStreamInfo.end(); it++) {
5074 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5075 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5076 && mBatchSize) {
5077 rc = channel->setBatchSize(mBatchSize);
5078 //Disable per frame map unmap for HFR/batchmode case
5079 rc |= channel->setPerFrameMapUnmap(false);
5080 if (NO_ERROR != rc) {
5081 LOGE("Channel init failed %d", rc);
5082 pthread_mutex_unlock(&mMutex);
5083 goto error_exit;
5084 }
5085 }
5086 }
5087
5088 //First initialize all streams
5089 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5090 it != mStreamInfo.end(); it++) {
5091 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005092
5093 /* Initial value of NR mode is needed before stream on */
5094 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5096 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005097 setEis) {
5098 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5099 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5100 is_type = mStreamConfigInfo.is_type[i];
5101 break;
5102 }
5103 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005104 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005105 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005106 rc = channel->initialize(IS_TYPE_NONE);
5107 }
5108 if (NO_ERROR != rc) {
5109 LOGE("Channel initialization failed %d", rc);
5110 pthread_mutex_unlock(&mMutex);
5111 goto error_exit;
5112 }
5113 }
5114
5115 if (mRawDumpChannel) {
5116 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5117 if (rc != NO_ERROR) {
5118 LOGE("Error: Raw Dump Channel init failed");
5119 pthread_mutex_unlock(&mMutex);
5120 goto error_exit;
5121 }
5122 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005123 if (mHdrPlusRawSrcChannel) {
5124 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5125 if (rc != NO_ERROR) {
5126 LOGE("Error: HDR+ RAW Source Channel init failed");
5127 pthread_mutex_unlock(&mMutex);
5128 goto error_exit;
5129 }
5130 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (mSupportChannel) {
5132 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5133 if (rc < 0) {
5134 LOGE("Support channel initialization failed");
5135 pthread_mutex_unlock(&mMutex);
5136 goto error_exit;
5137 }
5138 }
5139 if (mAnalysisChannel) {
5140 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5141 if (rc < 0) {
5142 LOGE("Analysis channel initialization failed");
5143 pthread_mutex_unlock(&mMutex);
5144 goto error_exit;
5145 }
5146 }
5147 if (mDummyBatchChannel) {
5148 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5149 if (rc < 0) {
5150 LOGE("mDummyBatchChannel setBatchSize failed");
5151 pthread_mutex_unlock(&mMutex);
5152 goto error_exit;
5153 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005154 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005155 if (rc < 0) {
5156 LOGE("mDummyBatchChannel initialization failed");
5157 pthread_mutex_unlock(&mMutex);
5158 goto error_exit;
5159 }
5160 }
5161
5162 // Set bundle info
5163 rc = setBundleInfo();
5164 if (rc < 0) {
5165 LOGE("setBundleInfo failed %d", rc);
5166 pthread_mutex_unlock(&mMutex);
5167 goto error_exit;
5168 }
5169
5170 //update settings from app here
5171 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5172 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5173 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5174 }
5175 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5176 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5177 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5178 }
5179 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5180 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5181 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5182
5183 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5184 (mLinkedCameraId != mCameraId) ) {
5185 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5186 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005187 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005188 goto error_exit;
5189 }
5190 }
5191
5192 // add bundle related cameras
5193 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5194 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005195 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5196 &m_pDualCamCmdPtr->bundle_info;
5197 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005198 if (mIsDeviceLinked)
5199 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5200 else
5201 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5202
5203 pthread_mutex_lock(&gCamLock);
5204
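// A session id still equal to the 0xDEADBEEF sentinel means the linked
// camera presumably has not published a valid session yet, so the dual
// camera bundle cannot be configured and the request is failed.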
5205 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5206 LOGE("Dualcam: Invalid Session Id ");
5207 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005208 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005209 goto error_exit;
5210 }
5211
5212 if (mIsMainCamera == 1) {
5213 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5214 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005215 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005216 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 // related session id should be session id of linked session
5218 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5219 } else {
5220 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5221 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005222 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005223 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5225 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005226 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 pthread_mutex_unlock(&gCamLock);
5228
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005229 rc = mCameraHandle->ops->set_dual_cam_cmd(
5230 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 if (rc < 0) {
5232 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005233 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 goto error_exit;
5235 }
5236 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005237 goto no_error;
5238error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005239 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005240 return rc;
5241no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 mWokenUpByDaemon = false;
5243 mPendingLiveRequest = 0;
5244 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 }
5246
5247 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005248 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005249
5250 if (mFlushPerf) {
5251 //we cannot accept any requests during flush
5252 LOGE("process_capture_request cannot proceed during flush");
5253 pthread_mutex_unlock(&mMutex);
5254 return NO_ERROR; //should return an error
5255 }
5256
5257 if (meta.exists(ANDROID_REQUEST_ID)) {
5258 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5259 mCurrentRequestId = request_id;
5260 LOGD("Received request with id: %d", request_id);
5261 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5262 LOGE("Unable to find request id field, \
5263 & no previous id available");
5264 pthread_mutex_unlock(&mMutex);
5265 return NAME_NOT_FOUND;
5266 } else {
5267 LOGD("Re-using old request id");
5268 request_id = mCurrentRequestId;
5269 }
5270
5271 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5272 request->num_output_buffers,
5273 request->input_buffer,
5274 frameNumber);
5275 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005276 streamsArray.num_streams = 0;
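// streamsArray accumulates the backend stream IDs (and buffer indices)
// touched by this request; it is later pushed to the backend through the
// CAM_INTF_META_STREAM_ID entry in mParameters.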
Thierry Strudel3d639192016-09-09 11:52:26 -07005277 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005278 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 uint32_t snapshotStreamId = 0;
5280 for (size_t i = 0; i < request->num_output_buffers; i++) {
5281 const camera3_stream_buffer_t& output = request->output_buffers[i];
5282 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5283
Emilian Peev7650c122017-01-19 08:24:33 -08005284 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5285 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005286 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 blob_request = 1;
5288 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5289 }
5290
5291 if (output.acquire_fence != -1) {
5292 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5293 close(output.acquire_fence);
5294 if (rc != OK) {
5295 LOGE("sync wait failed %d", rc);
5296 pthread_mutex_unlock(&mMutex);
5297 return rc;
5298 }
5299 }
5300
Emilian Peev0f3c3162017-03-15 12:57:46 +00005301 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5302 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005303 depthRequestPresent = true;
5304 continue;
5305 }
5306
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005307 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005308 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005309
5310 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5311 isVidBufRequested = true;
5312 }
5313 }
5314
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005315 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5316 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5317 itr++) {
5318 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5319 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5320 channel->getStreamID(channel->getStreamTypeMask());
5321
5322 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5323 isVidBufRequested = true;
5324 }
5325 }
5326
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005328 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005329 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005330 }
5331 if (blob_request && mRawDumpChannel) {
5332 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005333 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005334 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005335 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 }
5337
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005338 {
5339 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5340 // Request a RAW buffer if
5341 // 1. mHdrPlusRawSrcChannel is valid.
5342 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5343 // 3. There is no pending HDR+ request.
5344 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5345 mHdrPlusPendingRequests.size() == 0) {
5346 streamsArray.stream_request[streamsArray.num_streams].streamID =
5347 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5348 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5349 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005350 }
5351
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005352 //extract capture intent
5353 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5354 mCaptureIntent =
5355 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5356 }
5357
5358 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5359 mCacMode =
5360 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5361 }
5362
5363 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005364 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005365
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005366 {
5367 Mutex::Autolock l(gHdrPlusClientLock);
5368 // If this request has a still capture intent, try to submit an HDR+ request.
5369 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5370 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5371 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5372 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005373 }
5374
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005375 if (hdrPlusRequest) {
5376 // For a HDR+ request, just set the frame parameters.
5377 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5378 if (rc < 0) {
5379 LOGE("fail to set frame parameters");
5380 pthread_mutex_unlock(&mMutex);
5381 return rc;
5382 }
5383 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005384 /* Parse the settings:
5385 * - For every request in NORMAL MODE
5386 * - For every request in HFR mode during preview only case
5387 * - For first request of every batch in HFR mode during video
5388 * recording. In batchmode the same settings except frame number is
5389 * repeated in each request of the batch.
5390 */
5391 if (!mBatchSize ||
5392 (mBatchSize && !isVidBufRequested) ||
5393 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005394 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005395 if (rc < 0) {
5396 LOGE("fail to set frame parameters");
5397 pthread_mutex_unlock(&mMutex);
5398 return rc;
5399 }
5400 }
5401 /* For batchMode HFR, setFrameParameters is not called for every
5402 * request. But only frame number of the latest request is parsed.
5403 * Keep track of first and last frame numbers in a batch so that
5404 * metadata for the frame numbers of batch can be duplicated in
5405 * handleBatchMetadata */
5406 if (mBatchSize) {
5407 if (!mToBeQueuedVidBufs) {
5408 //start of the batch
5409 mFirstFrameNumberInBatch = request->frame_number;
5410 }
5411 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5412 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5413 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005414 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005415 return BAD_VALUE;
5416 }
5417 }
5418 if (mNeedSensorRestart) {
5419 /* Unlock the mutex as restartSensor waits on the channels to be
5420 * stopped, which in turn calls stream callback functions -
5421 * handleBufferWithLock and handleMetadataWithLock */
5422 pthread_mutex_unlock(&mMutex);
5423 rc = dynamicUpdateMetaStreamInfo();
5424 if (rc != NO_ERROR) {
5425 LOGE("Restarting the sensor failed");
5426 return BAD_VALUE;
5427 }
5428 mNeedSensorRestart = false;
5429 pthread_mutex_lock(&mMutex);
5430 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005431 if(mResetInstantAEC) {
5432 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5433 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5434 mResetInstantAEC = false;
5435 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005436 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005437 if (request->input_buffer->acquire_fence != -1) {
5438 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5439 close(request->input_buffer->acquire_fence);
5440 if (rc != OK) {
5441 LOGE("input buffer sync wait failed %d", rc);
5442 pthread_mutex_unlock(&mMutex);
5443 return rc;
5444 }
5445 }
5446 }
5447
5448 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5449 mLastCustIntentFrmNum = frameNumber;
5450 }
5451 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005453 pendingRequestIterator latestRequest;
5454 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005455 pendingRequest.num_buffers = depthRequestPresent ?
5456 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005457 pendingRequest.request_id = request_id;
5458 pendingRequest.blob_request = blob_request;
5459 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005460 if (request->input_buffer) {
5461 pendingRequest.input_buffer =
5462 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5463 *(pendingRequest.input_buffer) = *(request->input_buffer);
5464 pInputBuffer = pendingRequest.input_buffer;
5465 } else {
5466 pendingRequest.input_buffer = NULL;
5467 pInputBuffer = NULL;
5468 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005469 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005470
5471 pendingRequest.pipeline_depth = 0;
5472 pendingRequest.partial_result_cnt = 0;
5473 extractJpegMetadata(mCurJpegMeta, request);
5474 pendingRequest.jpegMetadata = mCurJpegMeta;
5475 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005476 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005477 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5478 mHybridAeEnable =
5479 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5480 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005481
5482 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5483 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005484 /* DevCamDebug metadata processCaptureRequest */
5485 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5486 mDevCamDebugMetaEnable =
5487 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5488 }
5489 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5490 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005491
5492 //extract CAC info
5493 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5494 mCacMode =
5495 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5496 }
5497 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005498 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005499 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5500 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005501
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005502 // extract enableZsl info
5503 if (gExposeEnableZslKey) {
5504 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5505 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5506 mZslEnabled = pendingRequest.enableZsl;
5507 } else {
5508 pendingRequest.enableZsl = mZslEnabled;
5509 }
5510 }
5511
Thierry Strudel3d639192016-09-09 11:52:26 -07005512 PendingBuffersInRequest bufsForCurRequest;
5513 bufsForCurRequest.frame_number = frameNumber;
5514 // Mark current timestamp for the new request
5515 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005516 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005517
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005518 if (hdrPlusRequest) {
5519 // Save settings for this request.
5520 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5521 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5522
5523 // Add to pending HDR+ request queue.
5524 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5525 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5526
5527 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5528 }
5529
Thierry Strudel3d639192016-09-09 11:52:26 -07005530 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005531 if ((request->output_buffers[i].stream->data_space ==
5532 HAL_DATASPACE_DEPTH) &&
5533 (HAL_PIXEL_FORMAT_BLOB ==
5534 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005535 continue;
5536 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005537 RequestedBufferInfo requestedBuf;
5538 memset(&requestedBuf, 0, sizeof(requestedBuf));
5539 requestedBuf.stream = request->output_buffers[i].stream;
5540 requestedBuf.buffer = NULL;
5541 pendingRequest.buffers.push_back(requestedBuf);
5542
5543 // Add to buffer handle the pending buffers list
5544 PendingBufferInfo bufferInfo;
5545 bufferInfo.buffer = request->output_buffers[i].buffer;
5546 bufferInfo.stream = request->output_buffers[i].stream;
5547 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5548 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5549 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5550 frameNumber, bufferInfo.buffer,
5551 channel->getStreamTypeMask(), bufferInfo.stream->format);
5552 }
5553 // Add this request packet into mPendingBuffersMap
5554 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5555 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5556 mPendingBuffersMap.get_num_overall_buffers());
5557
5558 latestRequest = mPendingRequestsList.insert(
5559 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005560
5561 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5562 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005563 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005564 for (size_t i = 0; i < request->num_output_buffers; i++) {
5565 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5566 }
5567
Thierry Strudel3d639192016-09-09 11:52:26 -07005568 if(mFlush) {
5569 LOGI("mFlush is true");
5570 pthread_mutex_unlock(&mMutex);
5571 return NO_ERROR;
5572 }
5573
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5575 // channel.
5576 if (!hdrPlusRequest) {
5577 int indexUsed;
5578 // Notify metadata channel we receive a request
5579 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005580
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005581 if(request->input_buffer != NULL){
5582 LOGD("Input request, frame_number %d", frameNumber);
5583 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5584 if (NO_ERROR != rc) {
5585 LOGE("fail to set reproc parameters");
5586 pthread_mutex_unlock(&mMutex);
5587 return rc;
5588 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005589 }
5590
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005591 // Call request on other streams
5592 uint32_t streams_need_metadata = 0;
5593 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
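// Walk the framework output buffers and hand each one to its owning channel.
// Blob (JPEG) and YUV reprocess outputs may additionally need a copy of the
// HAL metadata; that is tracked per buffer via need_metadata and counted in
// streams_need_metadata.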
5594 for (size_t i = 0; i < request->num_output_buffers; i++) {
5595 const camera3_stream_buffer_t& output = request->output_buffers[i];
5596 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5597
5598 if (channel == NULL) {
5599 LOGW("invalid channel pointer for stream");
5600 continue;
5601 }
5602
5603 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5604 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5605 output.buffer, request->input_buffer, frameNumber);
5606 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005607 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005608 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5609 if (rc < 0) {
5610 LOGE("Fail to request on picture channel");
5611 pthread_mutex_unlock(&mMutex);
5612 return rc;
5613 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005614 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005615 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
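// Depth blobs are not serviced by a regular backend stream request: the
// buffer is only mapped to the depth channel here and is presumably filled
// later from the PDAF data, so the stream bookkeeping below is skipped.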
5616 assert(NULL != mDepthChannel);
5617 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005618
Emilian Peev7650c122017-01-19 08:24:33 -08005619 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5620 if (rc < 0) {
5621 LOGE("Fail to map on depth buffer");
5622 pthread_mutex_unlock(&mMutex);
5623 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005624 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005625 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005626 } else {
5627 LOGD("snapshot request with buffer %p, frame_number %d",
5628 output.buffer, frameNumber);
5629 if (!request->settings) {
5630 rc = channel->request(output.buffer, frameNumber,
5631 NULL, mPrevParameters, indexUsed);
5632 } else {
5633 rc = channel->request(output.buffer, frameNumber,
5634 NULL, mParameters, indexUsed);
5635 }
5636 if (rc < 0) {
5637 LOGE("Fail to request on picture channel");
5638 pthread_mutex_unlock(&mMutex);
5639 return rc;
5640 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005641
Emilian Peev7650c122017-01-19 08:24:33 -08005642 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5643 uint32_t j = 0;
5644 for (j = 0; j < streamsArray.num_streams; j++) {
5645 if (streamsArray.stream_request[j].streamID == streamId) {
5646 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5647 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5648 else
5649 streamsArray.stream_request[j].buf_index = indexUsed;
5650 break;
5651 }
5652 }
5653 if (j == streamsArray.num_streams) {
5654 LOGE("Did not find matching stream to update index");
5655 assert(0);
5656 }
5657
5658 pendingBufferIter->need_metadata = true;
5659 streams_need_metadata++;
5660 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005661 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005662 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5663 bool needMetadata = false;
5664 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5665 rc = yuvChannel->request(output.buffer, frameNumber,
5666 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5667 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005668 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005669 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005670 pthread_mutex_unlock(&mMutex);
5671 return rc;
5672 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005673
5674 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5675 uint32_t j = 0;
5676 for (j = 0; j < streamsArray.num_streams; j++) {
5677 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005678 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5679 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5680 else
5681 streamsArray.stream_request[j].buf_index = indexUsed;
5682 break;
5683 }
5684 }
5685 if (j == streamsArray.num_streams) {
5686 LOGE("Did not find matching stream to update index");
5687 assert(0);
5688 }
5689
5690 pendingBufferIter->need_metadata = needMetadata;
5691 if (needMetadata)
5692 streams_need_metadata += 1;
5693 LOGD("calling YUV channel request, need_metadata is %d",
5694 needMetadata);
5695 } else {
5696 LOGD("request with buffer %p, frame_number %d",
5697 output.buffer, frameNumber);
5698
5699 rc = channel->request(output.buffer, frameNumber, indexUsed);
5700
5701 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5702 uint32_t j = 0;
5703 for (j = 0; j < streamsArray.num_streams; j++) {
5704 if (streamsArray.stream_request[j].streamID == streamId) {
5705 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5706 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5707 else
5708 streamsArray.stream_request[j].buf_index = indexUsed;
5709 break;
5710 }
5711 }
5712 if (j == streamsArray.num_streams) {
5713 LOGE("Did not find matching stream to update index");
5714 assert(0);
5715 }
5716
5717 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5718 && mBatchSize) {
5719 mToBeQueuedVidBufs++;
5720 if (mToBeQueuedVidBufs == mBatchSize) {
5721 channel->queueBatchBuf();
5722 }
5723 }
5724 if (rc < 0) {
5725 LOGE("request failed");
5726 pthread_mutex_unlock(&mMutex);
5727 return rc;
5728 }
5729 }
5730 pendingBufferIter++;
5731 }
5732
5733 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5734 itr++) {
5735 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5736
5737 if (channel == NULL) {
5738 LOGE("invalid channel pointer for stream");
5739 assert(0);
5740 return BAD_VALUE;
5741 }
5742
5743 InternalRequest requestedStream;
5744 requestedStream = (*itr);
5745
5746
5747 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5748 LOGD("snapshot request internally input buffer %p, frame_number %d",
5749 request->input_buffer, frameNumber);
5750 if(request->input_buffer != NULL){
5751 rc = channel->request(NULL, frameNumber,
5752 pInputBuffer, &mReprocMeta, indexUsed, true,
5753 requestedStream.meteringOnly);
5754 if (rc < 0) {
5755 LOGE("Fail to request on picture channel");
5756 pthread_mutex_unlock(&mMutex);
5757 return rc;
5758 }
5759 } else {
5760 LOGD("snapshot request with frame_number %d", frameNumber);
5761 if (!request->settings) {
5762 rc = channel->request(NULL, frameNumber,
5763 NULL, mPrevParameters, indexUsed, true,
5764 requestedStream.meteringOnly);
5765 } else {
5766 rc = channel->request(NULL, frameNumber,
5767 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5768 }
5769 if (rc < 0) {
5770 LOGE("Fail to request on picture channel");
5771 pthread_mutex_unlock(&mMutex);
5772 return rc;
5773 }
5774
5775 if ((*itr).meteringOnly != 1) {
5776 requestedStream.need_metadata = 1;
5777 streams_need_metadata++;
5778 }
5779 }
5780
5781 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5782 uint32_t j = 0;
5783 for (j = 0; j < streamsArray.num_streams; j++) {
5784 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005785 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5786 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5787 else
5788 streamsArray.stream_request[j].buf_index = indexUsed;
5789 break;
5790 }
5791 }
5792 if (j == streamsArray.num_streams) {
5793 LOGE("Did not find matching stream to update index");
5794 assert(0);
5795 }
5796
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005797 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005798 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005799 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005800 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005801 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005802 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005803 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005804
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005805 //If 2 streams have need_metadata set to true, fail the request, unless
5806 //we copy/reference count the metadata buffer
5807 if (streams_need_metadata > 1) {
5808 LOGE("not supporting request in which two streams requires"
5809 " 2 HAL metadata for reprocessing");
5810 pthread_mutex_unlock(&mMutex);
5811 return -EINVAL;
5812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005813
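// Decide how PDAF (phase-detection) data should be handled for this request:
// default to SKIP when a depth channel exists and DISABLED when it does not.
// When the request actually targets the depth stream, honor the
// NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE setting if present, otherwise reuse
// the last requested mode cached in mDepthCloudMode.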
Emilian Peev656e4fa2017-06-02 16:47:04 +01005814 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5815 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5816 if (depthRequestPresent && mDepthChannel) {
5817 if (request->settings) {
5818 camera_metadata_ro_entry entry;
5819 if (find_camera_metadata_ro_entry(request->settings,
5820 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5821 if (entry.data.u8[0]) {
5822 pdafEnable = CAM_PD_DATA_ENABLED;
5823 } else {
5824 pdafEnable = CAM_PD_DATA_SKIP;
5825 }
5826 mDepthCloudMode = pdafEnable;
5827 } else {
5828 pdafEnable = mDepthCloudMode;
5829 }
5830 } else {
5831 pdafEnable = mDepthCloudMode;
5832 }
5833 }
5834
Emilian Peev7650c122017-01-19 08:24:33 -08005835 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5836 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5837 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5838 pthread_mutex_unlock(&mMutex);
5839 return BAD_VALUE;
5840 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005841
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005842 if (request->input_buffer == NULL) {
5843 /* Set the parameters to backend:
5844 * - For every request in NORMAL MODE
5845 * - For every request in HFR mode during preview only case
5846 * - Once every batch in HFR mode during video recording
5847 */
5848 if (!mBatchSize ||
5849 (mBatchSize && !isVidBufRequested) ||
5850 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5851 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5852 mBatchSize, isVidBufRequested,
5853 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005854
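// For HFR batch mode, merge the stream IDs requested across the batch into
// mBatchedStreamsArray (skipping duplicates) so that the single set_parms
// issued per batch covers every stream queued during that batch.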
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005855 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5856 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5857 uint32_t m = 0;
5858 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5859 if (streamsArray.stream_request[k].streamID ==
5860 mBatchedStreamsArray.stream_request[m].streamID)
5861 break;
5862 }
5863 if (m == mBatchedStreamsArray.num_streams) {
5864 mBatchedStreamsArray.stream_request\
5865 [mBatchedStreamsArray.num_streams].streamID =
5866 streamsArray.stream_request[k].streamID;
5867 mBatchedStreamsArray.stream_request\
5868 [mBatchedStreamsArray.num_streams].buf_index =
5869 streamsArray.stream_request[k].buf_index;
5870 mBatchedStreamsArray.num_streams =
5871 mBatchedStreamsArray.num_streams + 1;
5872 }
5873 }
5874 streamsArray = mBatchedStreamsArray;
5875 }
5876 /* Update stream id of all the requested buffers */
5877 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5878 streamsArray)) {
5879 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005880 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005881 return BAD_VALUE;
5882 }
5883
5884 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5885 mParameters);
5886 if (rc < 0) {
5887 LOGE("set_parms failed");
5888 }
5889 /* reset to zero because the batch is queued */
5890 mToBeQueuedVidBufs = 0;
5891 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5892 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5893 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005894 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5895 uint32_t m = 0;
5896 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5897 if (streamsArray.stream_request[k].streamID ==
5898 mBatchedStreamsArray.stream_request[m].streamID)
5899 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005900 }
5901 if (m == mBatchedStreamsArray.num_streams) {
5902 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5903 streamID = streamsArray.stream_request[k].streamID;
5904 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5905 buf_index = streamsArray.stream_request[k].buf_index;
5906 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5907 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005908 }
5909 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005910 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005911
5912 // Start all streams after the first setting is sent, so that the
5913 // setting can be applied sooner: (0 + apply_delay)th frame.
5914 if (mState == CONFIGURED && mChannelHandle) {
5915 //Then start them.
5916 LOGH("Start META Channel");
5917 rc = mMetadataChannel->start();
5918 if (rc < 0) {
5919 LOGE("META channel start failed");
5920 pthread_mutex_unlock(&mMutex);
5921 return rc;
5922 }
5923
5924 if (mAnalysisChannel) {
5925 rc = mAnalysisChannel->start();
5926 if (rc < 0) {
5927 LOGE("Analysis channel start failed");
5928 mMetadataChannel->stop();
5929 pthread_mutex_unlock(&mMutex);
5930 return rc;
5931 }
5932 }
5933
5934 if (mSupportChannel) {
5935 rc = mSupportChannel->start();
5936 if (rc < 0) {
5937 LOGE("Support channel start failed");
5938 mMetadataChannel->stop();
5939 /* Although support and analysis are mutually exclusive today
5940 adding it in any case for future proofing */
5941 if (mAnalysisChannel) {
5942 mAnalysisChannel->stop();
5943 }
5944 pthread_mutex_unlock(&mMutex);
5945 return rc;
5946 }
5947 }
5948 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5949 it != mStreamInfo.end(); it++) {
5950 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5951 LOGH("Start Processing Channel mask=%d",
5952 channel->getStreamTypeMask());
5953 rc = channel->start();
5954 if (rc < 0) {
5955 LOGE("channel start failed");
5956 pthread_mutex_unlock(&mMutex);
5957 return rc;
5958 }
5959 }
5960
5961 if (mRawDumpChannel) {
5962 LOGD("Starting raw dump stream");
5963 rc = mRawDumpChannel->start();
5964 if (rc != NO_ERROR) {
5965 LOGE("Error Starting Raw Dump Channel");
5966 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5967 it != mStreamInfo.end(); it++) {
5968 QCamera3Channel *channel =
5969 (QCamera3Channel *)(*it)->stream->priv;
5970 LOGH("Stopping Processing Channel mask=%d",
5971 channel->getStreamTypeMask());
5972 channel->stop();
5973 }
5974 if (mSupportChannel)
5975 mSupportChannel->stop();
5976 if (mAnalysisChannel) {
5977 mAnalysisChannel->stop();
5978 }
5979 mMetadataChannel->stop();
5980 pthread_mutex_unlock(&mMutex);
5981 return rc;
5982 }
5983 }
5984
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005985 // Configure modules for stream on.
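// start_channel is called with sensor streaming disabled so that Easel/MIPI
// can be brought up first; sensor streaming is started explicitly further
// below once that configuration is done.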
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005986 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005987 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005988 if (rc != NO_ERROR) {
5989 LOGE("start_channel failed %d", rc);
5990 pthread_mutex_unlock(&mMutex);
5991 return rc;
5992 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005993
5994 {
5995 // Configure Easel for stream on.
5996 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005997
5998 // Now that sensor mode should have been selected, get the selected sensor mode
5999 // info.
6000 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6001 getCurrentSensorModeInfo(mSensorModeInfo);
6002
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006003 if (EaselManagerClientOpened) {
6004 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006005 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6006 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006007 if (rc != OK) {
6008 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6009 mCameraId, mSensorModeInfo.op_pixel_clk);
6010 pthread_mutex_unlock(&mMutex);
6011 return rc;
6012 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006013 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006014 }
6015 }
6016
6017 // Start sensor streaming.
6018 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6019 mChannelHandle);
6020 if (rc != NO_ERROR) {
6021 LOGE("start_sensor_stream_on failed %d", rc);
6022 pthread_mutex_unlock(&mMutex);
6023 return rc;
6024 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006025 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006026 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006027 }
6028
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006029 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07006030 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006031 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen44abb642017-06-02 18:00:38 -07006032 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006033 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6034 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6035 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6036 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
6037 rc = enableHdrPlusModeLocked();
6038 if (rc != OK) {
6039 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6040 pthread_mutex_unlock(&mMutex);
6041 return rc;
6042 }
6043
6044 mFirstPreviewIntentSeen = true;
6045 }
6046 }
6047
Thierry Strudel3d639192016-09-09 11:52:26 -07006048 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6049
6050 mState = STARTED;
6051 // Added a timed condition wait
6052 struct timespec ts;
6053 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006054 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006055 if (rc < 0) {
6056 isValidTimeout = 0;
6057 LOGE("Error reading the real time clock!!");
6058 }
6059 else {
6060 // Use a timeout of 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006061 int64_t timeout = 5;
6062 {
6063 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6064 // If there is a pending HDR+ request, the following requests may be blocked until the
6065 // HDR+ request is done. So allow a longer timeout.
6066 if (mHdrPlusPendingRequests.size() > 0) {
6067 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6068 }
6069 }
6070 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006071 }
6072 //Block on conditional variable
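// Throttle the caller: block while the number of in-flight requests is at or
// above mMinInFlightRequests (reprocess requests with an input buffer are not
// throttled). The timed wait turns a stalled pipeline into an -ENODEV error.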
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006073 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006074 (mState != ERROR) && (mState != DEINIT)) {
6075 if (!isValidTimeout) {
6076 LOGD("Blocking on conditional wait");
6077 pthread_cond_wait(&mRequestCond, &mMutex);
6078 }
6079 else {
6080 LOGD("Blocking on timed conditional wait");
6081 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6082 if (rc == ETIMEDOUT) {
6083 rc = -ENODEV;
6084 LOGE("Unblocked on timeout!!!!");
6085 break;
6086 }
6087 }
6088 LOGD("Unblocked");
6089 if (mWokenUpByDaemon) {
6090 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006091 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006092 break;
6093 }
6094 }
6095 pthread_mutex_unlock(&mMutex);
6096
6097 return rc;
6098}
6099
6100/*===========================================================================
6101 * FUNCTION : dump
6102 *
6103 * DESCRIPTION: Dump current HAL3 state (pending requests, pending buffers
6104 *              and pending frame drop list) to the given file descriptor
6105 * PARAMETERS :
6106 *   @fd : file descriptor to write the dump output to
6107 *
6108 * RETURN     : None
6109 *==========================================================================*/
6110void QCamera3HardwareInterface::dump(int fd)
6111{
6112 pthread_mutex_lock(&mMutex);
6113 dprintf(fd, "\n Camera HAL3 information Begin \n");
6114
6115 dprintf(fd, "\nNumber of pending requests: %zu \n",
6116 mPendingRequestsList.size());
6117 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6118 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6119 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6120 for(pendingRequestIterator i = mPendingRequestsList.begin();
6121 i != mPendingRequestsList.end(); i++) {
6122 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6123 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6124 i->input_buffer);
6125 }
6126 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6127 mPendingBuffersMap.get_num_overall_buffers());
6128 dprintf(fd, "-------+------------------\n");
6129 dprintf(fd, " Frame | Stream type mask \n");
6130 dprintf(fd, "-------+------------------\n");
6131 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6132 for(auto &j : req.mPendingBufferList) {
6133 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6134 dprintf(fd, " %5d | %11d \n",
6135 req.frame_number, channel->getStreamTypeMask());
6136 }
6137 }
6138 dprintf(fd, "-------+------------------\n");
6139
6140 dprintf(fd, "\nPending frame drop list: %zu\n",
6141 mPendingFrameDropList.size());
6142 dprintf(fd, "-------+-----------\n");
6143 dprintf(fd, " Frame | Stream ID \n");
6144 dprintf(fd, "-------+-----------\n");
6145 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6146 i != mPendingFrameDropList.end(); i++) {
6147 dprintf(fd, " %5d | %9d \n",
6148 i->frame_number, i->stream_ID);
6149 }
6150 dprintf(fd, "-------+-----------\n");
6151
6152 dprintf(fd, "\n Camera HAL3 information End \n");
6153
6154 /* use dumpsys media.camera as trigger to send update debug level event */
6155 mUpdateDebugLevel = true;
6156 pthread_mutex_unlock(&mMutex);
6157 return;
6158}
6159
6160/*===========================================================================
6161 * FUNCTION : flush
6162 *
6163 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6164 * conditionally restarts channels
6165 *
6166 * PARAMETERS :
6167 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006168 * @ stopChannelImmediately: stop the channel immediately. This should be used
6169 * when the device has encountered an error and MIPI
6170 * may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006171 *
6172 * RETURN :
6173 * 0 on success
6174 * Error code on failure
6175 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006176int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006177{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006178 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006179 int32_t rc = NO_ERROR;
6180
6181 LOGD("Unblocking Process Capture Request");
6182 pthread_mutex_lock(&mMutex);
6183 mFlush = true;
6184 pthread_mutex_unlock(&mMutex);
6185
6186 rc = stopAllChannels();
6187 // unlink of dualcam
6188 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006189 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6190 &m_pDualCamCmdPtr->bundle_info;
6191 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006192 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6193 pthread_mutex_lock(&gCamLock);
6194
6195 if (mIsMainCamera == 1) {
6196 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6197 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006198 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006199 // related session id should be session id of linked session
6200 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6201 } else {
6202 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6203 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006204 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006205 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6206 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006207 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006208 pthread_mutex_unlock(&gCamLock);
6209
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006210 rc = mCameraHandle->ops->set_dual_cam_cmd(
6211 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006212 if (rc < 0) {
6213 LOGE("Dualcam: Unlink failed, but still proceed to close");
6214 }
6215 }
6216
6217 if (rc < 0) {
6218 LOGE("stopAllChannels failed");
6219 return rc;
6220 }
6221 if (mChannelHandle) {
6222 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006223 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006224 }
6225
6226 // Reset bundle info
6227 rc = setBundleInfo();
6228 if (rc < 0) {
6229 LOGE("setBundleInfo failed %d", rc);
6230 return rc;
6231 }
6232
6233 // Mutex Lock
6234 pthread_mutex_lock(&mMutex);
6235
6236 // Unblock process_capture_request
6237 mPendingLiveRequest = 0;
6238 pthread_cond_signal(&mRequestCond);
6239
6240 rc = notifyErrorForPendingRequests();
6241 if (rc < 0) {
6242 LOGE("notifyErrorForPendingRequests failed");
6243 pthread_mutex_unlock(&mMutex);
6244 return rc;
6245 }
6246
6247 mFlush = false;
6248
6249 // Start the Streams/Channels
6250 if (restartChannels) {
6251 rc = startAllChannels();
6252 if (rc < 0) {
6253 LOGE("startAllChannels failed");
6254 pthread_mutex_unlock(&mMutex);
6255 return rc;
6256 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006257 if (mChannelHandle) {
6258 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006259 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006260 if (rc < 0) {
6261 LOGE("start_channel failed");
6262 pthread_mutex_unlock(&mMutex);
6263 return rc;
6264 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006265 }
6266 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006267 pthread_mutex_unlock(&mMutex);
6268
6269 return 0;
6270}
6271
6272/*===========================================================================
6273 * FUNCTION : flushPerf
6274 *
6275 * DESCRIPTION: This is the performance optimization version of flush that does
6276 * not use stream off, rather flushes the system
6277 *
6278 * PARAMETERS :
6279 *
6280 *
6281 * RETURN : 0 : success
6282 * -EINVAL: input is malformed (device is not valid)
6283 * -ENODEV: if the device has encountered a serious error
6284 *==========================================================================*/
6285int QCamera3HardwareInterface::flushPerf()
6286{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006287 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006288 int32_t rc = 0;
6289 struct timespec timeout;
6290 bool timed_wait = false;
6291
6292 pthread_mutex_lock(&mMutex);
6293 mFlushPerf = true;
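// While mFlushPerf is set, process_capture_request rejects any incoming
// request (see the mFlushPerf check there), so no new buffers are queued
// while the flush drains.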
6294 mPendingBuffersMap.numPendingBufsAtFlush =
6295 mPendingBuffersMap.get_num_overall_buffers();
6296 LOGD("Calling flush. Wait for %d buffers to return",
6297 mPendingBuffersMap.numPendingBufsAtFlush);
6298
6299 /* send the flush event to the backend */
6300 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6301 if (rc < 0) {
6302 LOGE("Error in flush: IOCTL failure");
6303 mFlushPerf = false;
6304 pthread_mutex_unlock(&mMutex);
6305 return -ENODEV;
6306 }
6307
6308 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6309 LOGD("No pending buffers in HAL, return flush");
6310 mFlushPerf = false;
6311 pthread_mutex_unlock(&mMutex);
6312 return rc;
6313 }
6314
6315 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006316 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006317 if (rc < 0) {
6318 LOGE("Error reading the real time clock, cannot use timed wait");
6319 } else {
6320 timeout.tv_sec += FLUSH_TIMEOUT;
6321 timed_wait = true;
6322 }
6323
6324 //Block on conditional variable
6325 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6326 LOGD("Waiting on mBuffersCond");
6327 if (!timed_wait) {
6328 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6329 if (rc != 0) {
6330 LOGE("pthread_cond_wait failed due to rc = %s",
6331 strerror(rc));
6332 break;
6333 }
6334 } else {
6335 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6336 if (rc != 0) {
6337 LOGE("pthread_cond_timedwait failed due to rc = %s",
6338 strerror(rc));
6339 break;
6340 }
6341 }
6342 }
6343 if (rc != 0) {
6344 mFlushPerf = false;
6345 pthread_mutex_unlock(&mMutex);
6346 return -ENODEV;
6347 }
6348
6349 LOGD("Received buffers, now safe to return them");
6350
6351 //make sure the channels handle flush
6352 //currently only required for the picture channel to release snapshot resources
6353 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6354 it != mStreamInfo.end(); it++) {
6355 QCamera3Channel *channel = (*it)->channel;
6356 if (channel) {
6357 rc = channel->flush();
6358 if (rc) {
6359 LOGE("Flushing the channels failed with error %d", rc);
6360 // even though the channel flush failed we need to continue and
6361 // return the buffers we have to the framework, however the return
6362 // value will be an error
6363 rc = -ENODEV;
6364 }
6365 }
6366 }
6367
6368 /* notify the frameworks and send errored results */
6369 rc = notifyErrorForPendingRequests();
6370 if (rc < 0) {
6371 LOGE("notifyErrorForPendingRequests failed");
6372 pthread_mutex_unlock(&mMutex);
6373 return rc;
6374 }
6375
6376 //unblock process_capture_request
6377 mPendingLiveRequest = 0;
6378 unblockRequestIfNecessary();
6379
6380 mFlushPerf = false;
6381 pthread_mutex_unlock(&mMutex);
6382 LOGD ("Flush Operation complete. rc = %d", rc);
6383 return rc;
6384}
6385
6386/*===========================================================================
6387 * FUNCTION : handleCameraDeviceError
6388 *
6389 * DESCRIPTION: This function performs an internal flush, notifies the
6390 * framework of the error and updates the state variable.
6391 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006392 * PARAMETERS :
6393 * @stopChannelImmediately : stop channels immediately without waiting for
6394 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 *
6396 * RETURN : NO_ERROR on Success
6397 * Error code on failure
6398 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006399int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006400{
6401 int32_t rc = NO_ERROR;
6402
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006403 {
6404 Mutex::Autolock lock(mFlushLock);
6405 pthread_mutex_lock(&mMutex);
6406 if (mState != ERROR) {
6407 //if mState != ERROR, nothing to be done
6408 pthread_mutex_unlock(&mMutex);
6409 return NO_ERROR;
6410 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006411 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006412
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006413 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006414 if (NO_ERROR != rc) {
6415 LOGE("internal flush to handle mState = ERROR failed");
6416 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006417
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006418 pthread_mutex_lock(&mMutex);
6419 mState = DEINIT;
6420 pthread_mutex_unlock(&mMutex);
6421 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006422
6423 camera3_notify_msg_t notify_msg;
6424 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6425 notify_msg.type = CAMERA3_MSG_ERROR;
6426 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6427 notify_msg.message.error.error_stream = NULL;
6428 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006429 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006430
6431 return rc;
6432}
6433
6434/*===========================================================================
6435 * FUNCTION : captureResultCb
6436 *
6437 * DESCRIPTION: Callback handler for all capture result
6438 * (streams, as well as metadata)
6439 *
6440 * PARAMETERS :
6441 * @metadata : metadata information
6442 * @buffer : actual gralloc buffer to be returned to frameworks.
6443 * NULL if metadata.
 * @frame_number : frame number of the result
 * @isInputBuffer : true if this callback is for the request's input buffer
6444 *
6445 * RETURN : NONE
6446 *==========================================================================*/
6447void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6448 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6449{
6450 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006451 pthread_mutex_lock(&mMutex);
6452 uint8_t batchSize = mBatchSize;
6453 pthread_mutex_unlock(&mMutex);
6454 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006455 handleBatchMetadata(metadata_buf,
6456 true /* free_and_bufdone_meta_buf */);
6457 } else { /* mBatchSize = 0 */
6458 hdrPlusPerfLock(metadata_buf);
6459 pthread_mutex_lock(&mMutex);
6460 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006461 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006462 true /* last urgent frame of batch metadata */,
6463 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006464 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006465 pthread_mutex_unlock(&mMutex);
6466 }
6467 } else if (isInputBuffer) {
6468 pthread_mutex_lock(&mMutex);
6469 handleInputBufferWithLock(frame_number);
6470 pthread_mutex_unlock(&mMutex);
6471 } else {
6472 pthread_mutex_lock(&mMutex);
6473 handleBufferWithLock(buffer, frame_number);
6474 pthread_mutex_unlock(&mMutex);
6475 }
6476 return;
6477}
6478
6479/*===========================================================================
6480 * FUNCTION : getReprocessibleOutputStreamId
6481 *
6482 * DESCRIPTION: Get source output stream id for the input reprocess stream
6483 * based on size and format: returns the id of the first output
6484 * stream whose dimensions and format match the configured input stream.
6485 *
6486 * PARAMETERS :
6487 * @id : return the stream id if found
6488 *
6489 * RETURN : int32_t type of status
6490 * NO_ERROR -- success
6491 * non-zero failure code
6492 *==========================================================================*/
6493int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6494{
6495 /* check if there is any output or bidirectional stream with the same size
6496 and format as the input stream, and return that stream's id */
6497 if ((mInputStreamInfo.dim.width > 0) &&
6498 (mInputStreamInfo.dim.height > 0)) {
6499 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6500 it != mStreamInfo.end(); it++) {
6501
6502 camera3_stream_t *stream = (*it)->stream;
6503 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6504 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6505 (stream->format == mInputStreamInfo.format)) {
6506 // Usage flag for an input stream and the source output stream
6507 // may be different.
6508 LOGD("Found reprocessible output stream! %p", *it);
6509 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6510 stream->usage, mInputStreamInfo.usage);
6511
6512 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6513 if (channel != NULL && channel->mStreams[0]) {
6514 id = channel->mStreams[0]->getMyServerID();
6515 return NO_ERROR;
6516 }
6517 }
6518 }
6519 } else {
6520 LOGD("No input stream, so no reprocessible output stream");
6521 }
6522 return NAME_NOT_FOUND;
6523}
6524
6525/*===========================================================================
6526 * FUNCTION : lookupFwkName
6527 *
6528 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6529 * make sure the parameter is correctly propagated
6530 *
6531 * PARAMETERS :
6532 * @arr : map between the two enums
6533 * @len : len of the map
6534 * @hal_name : name of the hal_parm to map
6535 *
6536 * RETURN : int type of status
6537 * fwk_name -- success
6538 * non-zero failure code (NAME_NOT_FOUND)
6539 *==========================================================================*/
6540template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6541 size_t len, halType hal_name)
6542{
6543
6544 for (size_t i = 0; i < len; i++) {
6545 if (arr[i].hal_name == hal_name) {
6546 return arr[i].fwk_name;
6547 }
6548 }
6549
6550 /* Not being able to find a matching framework type is not necessarily
6551 * an error case. This happens when mm-camera supports more attributes
6552 * than the framework does. */
6553 LOGH("Cannot find matching framework type");
6554 return NAME_NOT_FOUND;
6555}
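// Illustrative usage (as done for ANDROID_FLASH_MODE further below):
//   int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
//   if (NAME_NOT_FOUND != val) { uint8_t fwk_flashMode = (uint8_t)val; ... }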
6556
6557/*===========================================================================
6558 * FUNCTION : lookupHalName
6559 *
6560 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6561 * make sure the parameter is correctly propagated
6562 *
6563 * PARAMETERS :
6564 * @arr : map between the two enums
6565 * @len : len of the map
6566 * @fwk_name : name of the framework parameter to map
6567 *
6568 * RETURN : int32_t type of status
6569 * hal_name -- success
6570 * non-zero failure code (NAME_NOT_FOUND)
6571 *==========================================================================*/
6572template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6573 size_t len, fwkType fwk_name)
6574{
6575 for (size_t i = 0; i < len; i++) {
6576 if (arr[i].fwk_name == fwk_name) {
6577 return arr[i].hal_name;
6578 }
6579 }
6580
6581 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6582 return NAME_NOT_FOUND;
6583}
6584
6585/*===========================================================================
6586 * FUNCTION : lookupProp
6587 *
6588 * DESCRIPTION: lookup a value by its name
6589 *
6590 * PARAMETERS :
6591 * @arr : map between the two enums
6592 * @len : size of the map
6593 * @name : name to be looked up
6594 *
6595 * RETURN : Value if found
6596 * CAM_CDS_MODE_MAX if not found
6597 *==========================================================================*/
6598template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6599 size_t len, const char *name)
6600{
6601 if (name) {
6602 for (size_t i = 0; i < len; i++) {
6603 if (!strcmp(arr[i].desc, name)) {
6604 return arr[i].val;
6605 }
6606 }
6607 }
6608 return CAM_CDS_MODE_MAX;
6609}
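// Note: lookupProp() translates a string value (typically read from an Android
// system property via property_get()) into a HAL CDS mode; CAM_CDS_MODE_MAX is
// returned for an unknown or missing value.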
6610
6611/*===========================================================================
6612 * FUNCTION : translateFromHalMetadata
6613 *
 * DESCRIPTION: Translate metadata reported by the HAL backend into the
 *              camera_metadata_t format expected by the framework
6614 *
6615 * PARAMETERS :
6616 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006617 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006618 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006619 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6620 * in a batch. Always true for non-batch mode.
 * @enableZsl : requested ZSL setting for this capture, if provided
Thierry Strudel3d639192016-09-09 11:52:26 -07006621 *
6622 * RETURN : camera_metadata_t*
6623 * metadata in a format specified by fwk
6624 *==========================================================================*/
6625camera_metadata_t*
6626QCamera3HardwareInterface::translateFromHalMetadata(
6627 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006628 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006629 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006630 bool lastMetadataInBatch,
6631 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006632{
6633 CameraMetadata camMetadata;
6634 camera_metadata_t *resultMetadata;
6635
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006636 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006637 /* In batch mode, if this is not the last metadata in the batch, populate only
6638 * SENSOR_TIMESTAMP; the timestamp is needed for the shutter notification calculation.
6639 */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006640 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006641 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006642 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006643 }
6644
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006645 if (pendingRequest.jpegMetadata.entryCount())
6646 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006647
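    // Carry per-request bookkeeping (timestamp, request id, pipeline depth,
    // capture intent, hybrid AE setting) from the pending request into the result.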
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006648 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6649 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6650 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6651 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6652 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006653 if (mBatchSize == 0) {
6654 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006655 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006656 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006657
Samuel Ha68ba5172016-12-15 18:41:12 -08006658 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6659 // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006660 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006661 // DevCamDebug metadata translateFromHalMetadata AF
6662 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6663 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6664 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6665 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6666 }
6667 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6668 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6669 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6670 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6671 }
6672 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6673 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6674 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6675 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6676 }
6677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6678 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6679 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6680 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6681 }
6682 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6683 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6684 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6685 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6686 }
6687 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6688 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6689 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6690 *DevCamDebug_af_monitor_pdaf_target_pos;
6691 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6692 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6693 }
6694 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6695 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6696 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6697 *DevCamDebug_af_monitor_pdaf_confidence;
6698 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6699 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6700 }
6701 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6702 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6703 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6704 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6705 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6706 }
6707 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6708 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6709 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6710 *DevCamDebug_af_monitor_tof_target_pos;
6711 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6712 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6713 }
6714 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6715 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6716 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6717 *DevCamDebug_af_monitor_tof_confidence;
6718 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6719 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6720 }
6721 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6722 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6723 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6724 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6725 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6726 }
6727 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6728 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6729 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6730 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6731 &fwk_DevCamDebug_af_monitor_type_select, 1);
6732 }
6733 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6734 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6735 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6736 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6737 &fwk_DevCamDebug_af_monitor_refocus, 1);
6738 }
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6740 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6741 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6742 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6743 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6744 }
6745 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6746 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6747 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6748 *DevCamDebug_af_search_pdaf_target_pos;
6749 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6750 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6751 }
6752 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6753 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6754 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6755 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6756 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6757 }
6758 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6759 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6760 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6761 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6762 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6763 }
6764 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6765 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6766 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6767 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6768 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6769 }
6770 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6771 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6772 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6773 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6774 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6775 }
6776 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6777 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6778 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6779 *DevCamDebug_af_search_tof_target_pos;
6780 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6781 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6782 }
6783 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6784 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6785 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6786 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6787 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6788 }
6789 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6790 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6791 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6792 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6793 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6794 }
6795 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6796 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6797 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6798 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6799 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6800 }
6801 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6802 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6803 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6804 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6805 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6808 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6809 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6810 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6811 &fwk_DevCamDebug_af_search_type_select, 1);
6812 }
6813 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6814 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6815 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6816 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6817 &fwk_DevCamDebug_af_search_next_pos, 1);
6818 }
6819 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6820 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6821 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6822 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6823 &fwk_DevCamDebug_af_search_target_pos, 1);
6824 }
6825 // DevCamDebug metadata translateFromHalMetadata AEC
6826 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6827 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6828 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6829 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6832 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6833 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6834 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6835 }
6836 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6837 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6838 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6839 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6840 }
6841 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6842 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6843 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6844 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6845 }
6846 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6847 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6848 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6849 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6850 }
6851 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6852 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6853 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6854 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6855 }
6856 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6857 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6858 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6859 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6860 }
6861 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6862 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6863 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6864 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6865 }
Samuel Ha34229982017-02-17 13:51:11 -08006866 // DevCamDebug metadata translateFromHalMetadata zzHDR
6867 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6868 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6869 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6870 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6871 }
6872 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6873 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006874 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006875 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6876 }
6877 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6878 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6879 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6880 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6881 }
6882 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6883 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006884 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006885 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6886 }
6887 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6888 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6889 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6890 *DevCamDebug_aec_hdr_sensitivity_ratio;
6891 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6892 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6893 }
6894 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6895 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6896 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6897 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6898 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6899 }
6900 // DevCamDebug metadata translateFromHalMetadata ADRC
6901 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6902 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6903 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6904 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6905 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6906 }
6907 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6908 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6909 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6910 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6911 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6912 }
6913 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6914 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6915 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6916 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6917 }
6918 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6919 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6920 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6921 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6922 }
6923 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6924 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6925 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6926 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6927 }
6928 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6929 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6930 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6931 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6932 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006933 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6934 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6935 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6936 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6937 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6938 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6939 }
6940 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6941 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6942 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6943 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6944 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6945 }
6946 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6947 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6948 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6949 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6950 &fwk_DevCamDebug_aec_subject_motion, 1);
6951 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006952 // DevCamDebug metadata translateFromHalMetadata AWB
6953 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6954 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6955 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6956 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6957 }
6958 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6959 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6960 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6961 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6962 }
6963 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6964 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6965 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6966 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6967 }
6968 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6969 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6970 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6971 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6972 }
6973 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6974 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6975 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6976 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6977 }
6978 }
6979 // atrace_end(ATRACE_TAG_ALWAYS);
6980
Thierry Strudel3d639192016-09-09 11:52:26 -07006981 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6982 int64_t fwk_frame_number = *frame_number;
6983 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6984 }
6985
6986 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6987 int32_t fps_range[2];
6988 fps_range[0] = (int32_t)float_range->min_fps;
6989 fps_range[1] = (int32_t)float_range->max_fps;
6990 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6991 fps_range, 2);
6992 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6993 fps_range[0], fps_range[1]);
6994 }
6995
6996 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6997 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6998 }
6999
7000 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7001 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7002 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7003 *sceneMode);
7004 if (NAME_NOT_FOUND != val) {
7005 uint8_t fwkSceneMode = (uint8_t)val;
7006 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7007 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7008 fwkSceneMode);
7009 }
7010 }
7011
7012 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7013 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7014 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7015 }
7016
7017 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7018 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7019 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7020 }
7021
7022 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7023 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7024 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7025 }
7026
7027 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7028 CAM_INTF_META_EDGE_MODE, metadata) {
7029 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7030 }
7031
7032 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7033 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7034 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7035 }
7036
7037 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7038 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7039 }
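    // Publish the flash state only when the backend reported a valid (non-negative)
    // value; devices without a flash unit always report ANDROID_FLASH_STATE_UNAVAILABLE.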
7040
7041 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7042 if (0 <= *flashState) {
7043 uint8_t fwk_flashState = (uint8_t) *flashState;
7044 if (!gCamCapability[mCameraId]->flash_available) {
7045 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7046 }
7047 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7048 }
7049 }
7050
7051 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7052 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7053 if (NAME_NOT_FOUND != val) {
7054 uint8_t fwk_flashMode = (uint8_t)val;
7055 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7056 }
7057 }
7058
7059 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7060 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7061 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7062 }
7063
7064 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7065 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7066 }
7067
7068 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7069 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7070 }
7071
7072 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7073 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7074 }
7075
7076 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7077 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7078 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7079 }
7080
7081 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7082 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7083 LOGD("fwk_videoStab = %d", fwk_videoStab);
7084 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7085 } else {
7086 // Regardless of whether video stabilization is supported, CTS expects the EIS result
7087 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7088 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7089 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007090 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007091 }
7092
7093 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7094 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7095 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7096 }
7097
7098 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7099 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7100 }
7101
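    // The backend reports the applied dynamic black level per CFA channel; remap it
    // to RGGB order before publishing the vendor tag (and, when not building for
    // HAL 3.3, ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL).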
Thierry Strudel3d639192016-09-09 11:52:26 -07007102 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7103 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007104 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007105
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007106 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7107 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007108
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007109 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007110 blackLevelAppliedPattern->cam_black_level[0],
7111 blackLevelAppliedPattern->cam_black_level[1],
7112 blackLevelAppliedPattern->cam_black_level[2],
7113 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007114 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7115 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007116
7117#ifndef USE_HAL_3_3
7118 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307119 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007120 // bit-depth space, hence the division by 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307121 fwk_blackLevelInd[0] /= 16.0;
7122 fwk_blackLevelInd[1] /= 16.0;
7123 fwk_blackLevelInd[2] /= 16.0;
7124 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007125 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7126 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007127#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007128 }
7129
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007130#ifndef USE_HAL_3_3
7131 // Fixed whitelevel is used by ISP/Sensor
7132 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7133 &gCamCapability[mCameraId]->white_level, 1);
7134#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007135
7136 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7137 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7138 int32_t scalerCropRegion[4];
7139 scalerCropRegion[0] = hScalerCropRegion->left;
7140 scalerCropRegion[1] = hScalerCropRegion->top;
7141 scalerCropRegion[2] = hScalerCropRegion->width;
7142 scalerCropRegion[3] = hScalerCropRegion->height;
7143
7144 // Adjust crop region from sensor output coordinate system to active
7145 // array coordinate system.
7146 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7147 scalerCropRegion[2], scalerCropRegion[3]);
7148
7149 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7150 }
7151
7152 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7153 LOGD("sensorExpTime = %lld", *sensorExpTime);
7154 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7155 }
7156
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007157 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7158 LOGD("expTimeBoost = %f", *expTimeBoost);
7159 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7160 }
7161
Thierry Strudel3d639192016-09-09 11:52:26 -07007162 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7163 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7164 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7165 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7166 }
7167
7168 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7169 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7170 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7171 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7172 sensorRollingShutterSkew, 1);
7173 }
7174
7175 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7176 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7177 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7178
7179 //calculate the noise profile based on sensitivity
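        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) pair per color channel,
        // describing an approximately linear noise model: variance ~= S * signal + O.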
7180 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7181 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7182 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7183 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7184 noise_profile[i] = noise_profile_S;
7185 noise_profile[i+1] = noise_profile_O;
7186 }
7187 LOGD("noise model entry (S, O) is (%f, %f)",
7188 noise_profile_S, noise_profile_O);
7189 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7190 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7191 }
7192
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007193#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007194 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007195 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007196 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007197 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007198 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7199 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7200 }
7201 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007202#endif
7203
Thierry Strudel3d639192016-09-09 11:52:26 -07007204 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7205 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7206 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7207 }
7208
7209 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7210 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7211 *faceDetectMode);
7212 if (NAME_NOT_FOUND != val) {
7213 uint8_t fwk_faceDetectMode = (uint8_t)val;
7214 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7215
7216 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7217 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7218 CAM_INTF_META_FACE_DETECTION, metadata) {
7219 uint8_t numFaces = MIN(
7220 faceDetectionInfo->num_faces_detected, MAX_ROI);
7221 int32_t faceIds[MAX_ROI];
7222 uint8_t faceScores[MAX_ROI];
7223 int32_t faceRectangles[MAX_ROI * 4];
7224 int32_t faceLandmarks[MAX_ROI * 6];
7225 size_t j = 0, k = 0;
7226
7227 for (size_t i = 0; i < numFaces; i++) {
7228 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7229 // Adjust crop region from sensor output coordinate system to active
7230 // array coordinate system.
7231 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7232 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7233 rect.width, rect.height);
7234
7235 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7236 faceRectangles+j, -1);
7237
Jason Lee8ce36fa2017-04-19 19:40:37 -07007238 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7239 "bottom-right (%d, %d)",
7240 faceDetectionInfo->frame_id, i,
7241 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7242 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7243
Thierry Strudel3d639192016-09-09 11:52:26 -07007244 j+= 4;
7245 }
7246 if (numFaces <= 0) {
7247 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7248 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7249 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7250 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7251 }
7252
7253 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7254 numFaces);
7255 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7256 faceRectangles, numFaces * 4U);
7257 if (fwk_faceDetectMode ==
7258 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7259 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7260 CAM_INTF_META_FACE_LANDMARK, metadata) {
7261
7262 for (size_t i = 0; i < numFaces; i++) {
7263 // Map the co-ordinate sensor output coordinate system to active
7264 // array coordinate system.
7265 mCropRegionMapper.toActiveArray(
7266 landmarks->face_landmarks[i].left_eye_center.x,
7267 landmarks->face_landmarks[i].left_eye_center.y);
7268 mCropRegionMapper.toActiveArray(
7269 landmarks->face_landmarks[i].right_eye_center.x,
7270 landmarks->face_landmarks[i].right_eye_center.y);
7271 mCropRegionMapper.toActiveArray(
7272 landmarks->face_landmarks[i].mouth_center.x,
7273 landmarks->face_landmarks[i].mouth_center.y);
7274
7275 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007276
7277 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7278 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7279 faceDetectionInfo->frame_id, i,
7280 faceLandmarks[k + LEFT_EYE_X],
7281 faceLandmarks[k + LEFT_EYE_Y],
7282 faceLandmarks[k + RIGHT_EYE_X],
7283 faceLandmarks[k + RIGHT_EYE_Y],
7284 faceLandmarks[k + MOUTH_X],
7285 faceLandmarks[k + MOUTH_Y]);
7286
Thierry Strudel04e026f2016-10-10 11:27:36 -07007287 k+= TOTAL_LANDMARK_INDICES;
7288 }
7289 } else {
7290 for (size_t i = 0; i < numFaces; i++) {
7291 setInvalidLandmarks(faceLandmarks+k);
7292 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007293 }
7294 }
7295
Jason Lee49619db2017-04-13 12:07:22 -07007296 for (size_t i = 0; i < numFaces; i++) {
7297 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7298
7299 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7300 faceDetectionInfo->frame_id, i, faceIds[i]);
7301 }
7302
Thierry Strudel3d639192016-09-09 11:52:26 -07007303 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7304 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7305 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007306 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007307 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7308 CAM_INTF_META_FACE_BLINK, metadata) {
7309 uint8_t detected[MAX_ROI];
7310 uint8_t degree[MAX_ROI * 2];
7311 for (size_t i = 0; i < numFaces; i++) {
7312 detected[i] = blinks->blink[i].blink_detected;
7313 degree[2 * i] = blinks->blink[i].left_blink;
7314 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007315
Jason Lee49619db2017-04-13 12:07:22 -07007316 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7317 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7318 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7319 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007320 }
7321 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7322 detected, numFaces);
7323 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7324 degree, numFaces * 2);
7325 }
7326 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7327 CAM_INTF_META_FACE_SMILE, metadata) {
7328 uint8_t degree[MAX_ROI];
7329 uint8_t confidence[MAX_ROI];
7330 for (size_t i = 0; i < numFaces; i++) {
7331 degree[i] = smiles->smile[i].smile_degree;
7332 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007333
Jason Lee49619db2017-04-13 12:07:22 -07007334 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7335 "smile_degree=%d, smile_score=%d",
7336 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007337 }
7338 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7339 degree, numFaces);
7340 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7341 confidence, numFaces);
7342 }
7343 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7344 CAM_INTF_META_FACE_GAZE, metadata) {
7345 int8_t angle[MAX_ROI];
7346 int32_t direction[MAX_ROI * 3];
7347 int8_t degree[MAX_ROI * 2];
7348 for (size_t i = 0; i < numFaces; i++) {
7349 angle[i] = gazes->gaze[i].gaze_angle;
7350 direction[3 * i] = gazes->gaze[i].updown_dir;
7351 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7352 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7353 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7354 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007355
7356 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7357 "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7358 "left_right_gaze=%d, top_bottom_gaze=%d",
7359 faceDetectionInfo->frame_id, i, angle[i],
7360 direction[3 * i], direction[3 * i + 1],
7361 direction[3 * i + 2],
7362 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007363 }
7364 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7365 (uint8_t *)angle, numFaces);
7366 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7367 direction, numFaces * 3);
7368 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7369 (uint8_t *)degree, numFaces * 2);
7370 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007371 }
7372 }
7373 }
7374 }
7375
7376 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7377 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007378 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007379 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007380 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007381
Shuzhen Wang14415f52016-11-16 18:26:18 -08007382 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7383 histogramBins = *histBins;
7384 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7385 }
7386
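        // Publish the raw histogram only when histogram mode is ON and a valid bin
        // count was reported; for Bayer statistics a single channel buffer is chosen
        // (GR/GB/B, defaulting to the R channel).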
7387 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007388 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7389 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007390 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007391
7392 switch (stats_data->type) {
7393 case CAM_HISTOGRAM_TYPE_BAYER:
7394 switch (stats_data->bayer_stats.data_type) {
7395 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007396 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7397 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007398 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007399 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7400 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007401 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007402 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7403 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007404 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007405 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007406 case CAM_STATS_CHANNEL_R:
7407 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007408 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7409 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007410 }
7411 break;
7412 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007413 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007414 break;
7415 }
7416
Shuzhen Wang14415f52016-11-16 18:26:18 -08007417 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007418 }
7419 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007420 }
7421
7422 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7423 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7424 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7425 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7426 }
7427
7428 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7429 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7430 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7431 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7432 }
7433
7434 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7435 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7436 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7437 CAM_MAX_SHADING_MAP_HEIGHT);
7438 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7439 CAM_MAX_SHADING_MAP_WIDTH);
7440 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7441 lensShadingMap->lens_shading, 4U * map_width * map_height);
7442 }
7443
7444 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7445 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7446 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7447 }
7448
7449 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7450 //Populate CAM_INTF_META_TONEMAP_CURVES
7451 /* ch0 = G, ch 1 = B, ch 2 = R*/
7452 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7453 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7454 tonemap->tonemap_points_cnt,
7455 CAM_MAX_TONEMAP_CURVE_SIZE);
7456 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7457 }
7458
7459 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7460 &tonemap->curves[0].tonemap_points[0][0],
7461 tonemap->tonemap_points_cnt * 2);
7462
7463 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7464 &tonemap->curves[1].tonemap_points[0][0],
7465 tonemap->tonemap_points_cnt * 2);
7466
7467 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7468 &tonemap->curves[2].tonemap_points[0][0],
7469 tonemap->tonemap_points_cnt * 2);
7470 }
7471
7472 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7473 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7474 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7475 CC_GAIN_MAX);
7476 }
7477
7478 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7479 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7480 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7481 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7482 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7483 }
7484
7485 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7486 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7487 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7488 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7489 toneCurve->tonemap_points_cnt,
7490 CAM_MAX_TONEMAP_CURVE_SIZE);
7491 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7492 }
7493 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7494 (float*)toneCurve->curve.tonemap_points,
7495 toneCurve->tonemap_points_cnt * 2);
7496 }
7497
7498 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7499 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7500 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7501 predColorCorrectionGains->gains, 4);
7502 }
7503
7504 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7505 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7506 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7507 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7508 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7509 }
7510
7511 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7512 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7513 }
7514
7515 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7516 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7517 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7518 }
7519
7520 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7521 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7522 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7523 }
7524
7525 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7526 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7527 *effectMode);
7528 if (NAME_NOT_FOUND != val) {
7529 uint8_t fwk_effectMode = (uint8_t)val;
7530 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7531 }
7532 }
7533
7534 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7535 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7536 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7537 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7538 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7539 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7540 }
7541 int32_t fwk_testPatternData[4];
7542 fwk_testPatternData[0] = testPatternData->r;
7543 fwk_testPatternData[3] = testPatternData->b;
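        // ANDROID_SENSOR_TEST_PATTERN_DATA is ordered [R, Geven, Godd, B]; the two
        // green samples (GR/GB) therefore land in indices 1 and 2 depending on the
        // sensor's CFA arrangement.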
7544 switch (gCamCapability[mCameraId]->color_arrangement) {
7545 case CAM_FILTER_ARRANGEMENT_RGGB:
7546 case CAM_FILTER_ARRANGEMENT_GRBG:
7547 fwk_testPatternData[1] = testPatternData->gr;
7548 fwk_testPatternData[2] = testPatternData->gb;
7549 break;
7550 case CAM_FILTER_ARRANGEMENT_GBRG:
7551 case CAM_FILTER_ARRANGEMENT_BGGR:
7552 fwk_testPatternData[2] = testPatternData->gr;
7553 fwk_testPatternData[1] = testPatternData->gb;
7554 break;
7555 default:
7556 LOGE("color arrangement %d is not supported",
7557 gCamCapability[mCameraId]->color_arrangement);
7558 break;
7559 }
7560 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7561 }
7562
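    // Echo the JPEG/EXIF related settings (GPS, orientation, quality, thumbnail)
    // back into the capture result.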
7563 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7564 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7565 }
7566
7567 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7568 String8 str((const char *)gps_methods);
7569 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7570 }
7571
7572 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7573 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7574 }
7575
7576 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7577 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7578 }
7579
7580 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7581 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7582 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7583 }
7584
7585 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7586 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7587 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7588 }
7589
7590 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7591 int32_t fwk_thumb_size[2];
7592 fwk_thumb_size[0] = thumb_size->width;
7593 fwk_thumb_size[1] = thumb_size->height;
7594 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7595 }
7596
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007597 // Skip reprocess metadata if there is no input stream.
7598 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7599 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7600 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7601 privateData,
7602 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7603 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007604 }
7605
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007606 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007607 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007608 meteringMode, 1);
7609 }
7610
Thierry Strudel54dc9782017-02-15 12:12:10 -08007611 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7612 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7613 LOGD("hdr_scene_data: %d %f\n",
7614 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7615 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7616 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7617 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7618 &isHdr, 1);
7619 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7620 &isHdrConfidence, 1);
7621 }
7622
7623
7624
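    // Pack the tuning data into a flat blob for the QCAMERA3_TUNING_META_DATA_BLOB
    // vendor tag: a header of sizes (version, sensor, VFE, CPP, CAC, mod3) followed
    // by the corresponding variable-length payloads.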
Thierry Strudel3d639192016-09-09 11:52:26 -07007625 if (metadata->is_tuning_params_valid) {
7626 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7627 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7628 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7629
7630
7631 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7632 sizeof(uint32_t));
7633 data += sizeof(uint32_t);
7634
7635 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7636 sizeof(uint32_t));
7637 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7638 data += sizeof(uint32_t);
7639
7640 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7641 sizeof(uint32_t));
7642 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7643 data += sizeof(uint32_t);
7644
7645 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7646 sizeof(uint32_t));
7647 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7648 data += sizeof(uint32_t);
7649
7650 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7651 sizeof(uint32_t));
7652 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7653 data += sizeof(uint32_t);
7654
7655 metadata->tuning_params.tuning_mod3_data_size = 0;
7656 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7657 sizeof(uint32_t));
7658 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7659 data += sizeof(uint32_t);
7660
7661 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7662 TUNING_SENSOR_DATA_MAX);
7663 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7664 count);
7665 data += count;
7666
7667 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7668 TUNING_VFE_DATA_MAX);
7669 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7670 count);
7671 data += count;
7672
7673 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7674 TUNING_CPP_DATA_MAX);
7675 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7676 count);
7677 data += count;
7678
7679 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7680 TUNING_CAC_DATA_MAX);
7681 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7682 count);
7683 data += count;
7684
7685 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7686 (int32_t *)(void *)tuning_meta_data_blob,
7687 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7688 }
7689
7690 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7691 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7692 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7693 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7694 NEUTRAL_COL_POINTS);
7695 }
7696
7697 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7698 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7699 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7700 }
7701
7702 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7703 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7704 // Adjust crop region from sensor output coordinate system to active
7705 // array coordinate system.
7706 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7707 hAeRegions->rect.width, hAeRegions->rect.height);
7708
7709 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7710 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7711 REGIONS_TUPLE_COUNT);
7712 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7713 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7714 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7715 hAeRegions->rect.height);
7716 }
7717
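    // AF state: if an early AF state was already sent as a partial result
    // (focusStateSent), skip it here; otherwise report the focus state cached
    // when a newer AF trigger arrived, or the AF state from this metadata buffer.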
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007718 if (!pendingRequest.focusStateSent) {
7719 if (pendingRequest.focusStateValid) {
7720 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7721 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007722 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007723 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7724 uint8_t fwk_afState = (uint8_t) *afState;
7725 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7726 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7727 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007728 }
7729 }
7730
Thierry Strudel3d639192016-09-09 11:52:26 -07007731 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7732 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7733 }
7734
7735 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7736 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7737 }
7738
7739 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7740 uint8_t fwk_lensState = *lensState;
7741 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7742 }
7743
Thierry Strudel3d639192016-09-09 11:52:26 -07007744 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007745 uint32_t ab_mode = *hal_ab_mode;
7746 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7747 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7748 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7749 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007750 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007751 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007752 if (NAME_NOT_FOUND != val) {
7753 uint8_t fwk_ab_mode = (uint8_t)val;
7754 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7755 }
7756 }
7757
7758 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7759 int val = lookupFwkName(SCENE_MODES_MAP,
7760 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7761 if (NAME_NOT_FOUND != val) {
7762 uint8_t fwkBestshotMode = (uint8_t)val;
7763 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7764 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7765 } else {
7766 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7767 }
7768 }
7769
7770 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7771 uint8_t fwk_mode = (uint8_t) *mode;
7772 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7773 }
7774
7775    /* Constant metadata values to be updated */
7776 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7777 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7778
7779 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7780 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7781
7782 int32_t hotPixelMap[2];
7783 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7784
7785 // CDS
7786 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7787 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7788 }
7789
Thierry Strudel04e026f2016-10-10 11:27:36 -07007790 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7791 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007792 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007793 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7794 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7795 } else {
7796 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7797 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007798
7799 if(fwk_hdr != curr_hdr_state) {
7800 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7801 if(fwk_hdr)
7802 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7803 else
7804 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7805 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007806 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7807 }
7808
Thierry Strudel54dc9782017-02-15 12:12:10 -08007809 //binning correction
7810 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7811 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7812 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7813 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7814 }
7815
Thierry Strudel04e026f2016-10-10 11:27:36 -07007816 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007817 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007818 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7819 int8_t is_ir_on = 0;
7820
7821        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7822 if(is_ir_on != curr_ir_state) {
7823 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7824 if(is_ir_on)
7825 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7826 else
7827 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7828 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007829 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007830 }
7831
Thierry Strudel269c81a2016-10-12 12:13:59 -07007832 // AEC SPEED
7833 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7834 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7835 }
7836
7837 // AWB SPEED
7838 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7839 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7840 }
7841
Thierry Strudel3d639192016-09-09 11:52:26 -07007842 // TNR
7843 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7844 uint8_t tnr_enable = tnr->denoise_enable;
7845 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007846 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7847 int8_t is_tnr_on = 0;
7848
7849        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7850 if(is_tnr_on != curr_tnr_state) {
7851 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7852 if(is_tnr_on)
7853 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7854 else
7855 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7856 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007857
7858 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7859 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7860 }
7861
7862 // Reprocess crop data
7863 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7864 uint8_t cnt = crop_data->num_of_streams;
7865 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7866            // mm-qcamera-daemon only posts crop_data for streams
7867            // not linked to pproc, so the absence of valid crop
7868            // metadata is not necessarily an error.
7869 LOGD("No valid crop metadata entries");
7870 } else {
7871 uint32_t reproc_stream_id;
7872 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7873 LOGD("No reprocessible stream found, ignore crop data");
7874 } else {
7875 int rc = NO_ERROR;
7876 Vector<int32_t> roi_map;
7877 int32_t *crop = new int32_t[cnt*4];
7878 if (NULL == crop) {
7879 rc = NO_MEMORY;
7880 }
7881 if (NO_ERROR == rc) {
7882 int32_t streams_found = 0;
7883 for (size_t i = 0; i < cnt; i++) {
7884 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7885 if (pprocDone) {
7886 // HAL already does internal reprocessing,
7887 // either via reprocessing before JPEG encoding,
7888 // or offline postprocessing for pproc bypass case.
7889 crop[0] = 0;
7890 crop[1] = 0;
7891 crop[2] = mInputStreamInfo.dim.width;
7892 crop[3] = mInputStreamInfo.dim.height;
7893 } else {
7894 crop[0] = crop_data->crop_info[i].crop.left;
7895 crop[1] = crop_data->crop_info[i].crop.top;
7896 crop[2] = crop_data->crop_info[i].crop.width;
7897 crop[3] = crop_data->crop_info[i].crop.height;
7898 }
7899 roi_map.add(crop_data->crop_info[i].roi_map.left);
7900 roi_map.add(crop_data->crop_info[i].roi_map.top);
7901 roi_map.add(crop_data->crop_info[i].roi_map.width);
7902 roi_map.add(crop_data->crop_info[i].roi_map.height);
7903 streams_found++;
7904 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7905 crop[0], crop[1], crop[2], crop[3]);
7906 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7907 crop_data->crop_info[i].roi_map.left,
7908 crop_data->crop_info[i].roi_map.top,
7909 crop_data->crop_info[i].roi_map.width,
7910 crop_data->crop_info[i].roi_map.height);
7911 break;
7912
7913 }
7914 }
7915 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7916 &streams_found, 1);
7917 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7918 crop, (size_t)(streams_found * 4));
7919 if (roi_map.array()) {
7920 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7921 roi_map.array(), roi_map.size());
7922 }
7923 }
7924 if (crop) {
7925 delete [] crop;
7926 }
7927 }
7928 }
7929 }
7930
7931 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7932        // Regardless of whether CAC is supported or not, CTS expects the CAC result
7933        // to be non-NULL, so hardcode the CAC result to OFF mode.
7934 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7935 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7936 } else {
7937 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7938 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7939 *cacMode);
7940 if (NAME_NOT_FOUND != val) {
7941 uint8_t resultCacMode = (uint8_t)val;
7942                // Check whether the CAC result from the callback matches the framework-set
7943                // CAC mode. If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007944 if (pendingRequest.fwkCacMode != resultCacMode) {
7945 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007946 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007947 //Check if CAC is disabled by property
7948 if (m_cacModeDisabled) {
7949 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7950 }
7951
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007952 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007953 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7954 } else {
7955 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7956 }
7957 }
7958 }
7959
7960 // Post blob of cam_cds_data through vendor tag.
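    // Only the CDS enable of the reprocessible output stream is reported, so the
    // override blob posted below always carries a single stream entry.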
7961 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7962 uint8_t cnt = cdsInfo->num_of_streams;
7963 cam_cds_data_t cdsDataOverride;
7964 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7965 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7966 cdsDataOverride.num_of_streams = 1;
7967 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7968 uint32_t reproc_stream_id;
7969 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7970 LOGD("No reprocessible stream found, ignore cds data");
7971 } else {
7972 for (size_t i = 0; i < cnt; i++) {
7973 if (cdsInfo->cds_info[i].stream_id ==
7974 reproc_stream_id) {
7975 cdsDataOverride.cds_info[0].cds_enable =
7976 cdsInfo->cds_info[i].cds_enable;
7977 break;
7978 }
7979 }
7980 }
7981 } else {
7982 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7983 }
7984 camMetadata.update(QCAMERA3_CDS_INFO,
7985 (uint8_t *)&cdsDataOverride,
7986 sizeof(cam_cds_data_t));
7987 }
7988
7989 // Ldaf calibration data
7990 if (!mLdafCalibExist) {
7991 IF_META_AVAILABLE(uint32_t, ldafCalib,
7992 CAM_INTF_META_LDAF_EXIF, metadata) {
7993 mLdafCalibExist = true;
7994 mLdafCalib[0] = ldafCalib[0];
7995 mLdafCalib[1] = ldafCalib[1];
7996 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7997 ldafCalib[0], ldafCalib[1]);
7998 }
7999 }
8000
Thierry Strudel54dc9782017-02-15 12:12:10 -08008001 // EXIF debug data through vendor tag
8002 /*
8003 * Mobicat Mask can assume 3 values:
8004 * 1 refers to Mobicat data,
8005 * 2 refers to Stats Debug and Exif Debug Data
8006 * 3 refers to Mobicat and Stats Debug Data
8007 * We want to make sure that we are sending Exif debug data
8008 * only when Mobicat Mask is 2.
8009 */
8010 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8011 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8012 (uint8_t *)(void *)mExifParams.debug_params,
8013 sizeof(mm_jpeg_debug_exif_params_t));
8014 }
8015
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008016 // Reprocess and DDM debug data through vendor tag
8017 cam_reprocess_info_t repro_info;
8018 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008019 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8020 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008021 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008022 }
8023 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8024 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008025 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008026 }
8027 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8028 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008029 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008030 }
8031 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8032 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008033 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008034 }
8035 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8036 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008037 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008038 }
8039 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008040 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008041 }
8042 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8043 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008044 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008045 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008046 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8047 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8048 }
8049 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8050 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8051 }
8052 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8053 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008054
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008055 // INSTANT AEC MODE
8056 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8057 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8058 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8059 }
8060
Shuzhen Wange763e802016-03-31 10:24:29 -07008061 // AF scene change
8062 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8063 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8064 }
8065
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008066 // Enable ZSL
8067 if (enableZsl != nullptr) {
8068 uint8_t value = *enableZsl ?
8069 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8070 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8071 }
8072
Xu Han821ea9c2017-05-23 09:00:40 -07008073 // OIS Data
8074 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8075 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8076 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8077 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8078 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8079 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8080 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8081 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8082 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8083 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8084 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8085 }
8086
Thierry Strudel3d639192016-09-09 11:52:26 -07008087 resultMetadata = camMetadata.release();
8088 return resultMetadata;
8089}
8090
8091/*===========================================================================
8092 * FUNCTION : saveExifParams
8093 *
8094 * DESCRIPTION: Cache EXIF debug parameters from the metadata callback into mExifParams
8095 *
8096 * PARAMETERS :
8097 * @metadata : metadata information from callback
8098 *
8099 * RETURN : none
8100 *
8101 *==========================================================================*/
8102void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8103{
8104 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8105 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8106 if (mExifParams.debug_params) {
8107 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8108 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8109 }
8110 }
8111 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8112 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8113 if (mExifParams.debug_params) {
8114 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8115 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8116 }
8117 }
8118 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8119 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8120 if (mExifParams.debug_params) {
8121 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8122 mExifParams.debug_params->af_debug_params_valid = TRUE;
8123 }
8124 }
8125 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8126 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8127 if (mExifParams.debug_params) {
8128 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8129 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8130 }
8131 }
8132 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8133 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8134 if (mExifParams.debug_params) {
8135 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8136 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8137 }
8138 }
8139 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8140 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8141 if (mExifParams.debug_params) {
8142 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8143 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8144 }
8145 }
8146 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8147 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8148 if (mExifParams.debug_params) {
8149 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8150 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8151 }
8152 }
8153 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8154 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8155 if (mExifParams.debug_params) {
8156 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8157 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8158 }
8159 }
8160}
8161
8162/*===========================================================================
8163 * FUNCTION : get3AExifParams
8164 *
8165 * DESCRIPTION: Return the cached 3A EXIF parameters (mExifParams)
8166 *
8167 * PARAMETERS : none
8168 *
8169 *
8170 * RETURN : mm_jpeg_exif_params_t
8171 *
8172 *==========================================================================*/
8173mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8174{
8175 return mExifParams;
8176}
8177
8178/*===========================================================================
8179 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8180 *
8181 * DESCRIPTION: Translate urgent (partial result) metadata from the camera backend
8182 *              into framework result metadata
8182 *
8183 * PARAMETERS :
8184 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008185 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8186 * urgent metadata in a batch. Always true for
8187 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008188 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008189 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8190 * i.e. even though it doesn't map to a valid partial
8191 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008192 * RETURN : camera_metadata_t*
8193 * metadata in a format specified by fwk
8194 *==========================================================================*/
8195camera_metadata_t*
8196QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008197 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008198 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008199{
8200 CameraMetadata camMetadata;
8201 camera_metadata_t *resultMetadata;
8202
Shuzhen Wang485e2442017-08-02 12:21:08 -07008203 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008204 /* In batch mode, use empty metadata if this is not the last in batch
8205 */
8206 resultMetadata = allocate_camera_metadata(0, 0);
8207 return resultMetadata;
8208 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008209
8210 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8211 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8212 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8213 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8214 }
8215
8216 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8217 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8218 &aecTrigger->trigger, 1);
8219 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8220 &aecTrigger->trigger_id, 1);
8221 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8222 aecTrigger->trigger);
8223 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8224 aecTrigger->trigger_id);
8225 }
8226
8227 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8228 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8229 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8230 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8231 }
8232
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008233 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8234 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8235 if (NAME_NOT_FOUND != val) {
8236 uint8_t fwkAfMode = (uint8_t)val;
8237 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8238 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8239 } else {
8240 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8241 val);
8242 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008243 }
8244
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008245 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8246 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8247 af_trigger->trigger);
8248 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8249 af_trigger->trigger_id);
8250
8251 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8252 mAfTrigger = *af_trigger;
8253 uint32_t fwk_AfState = (uint32_t) *afState;
8254
8255 // If this is the result for a new trigger, check if there is new early
8256 // af state. If there is, use the last af state for all results
8257 // preceding current partial frame number.
8258 for (auto & pendingRequest : mPendingRequestsList) {
8259 if (pendingRequest.frame_number < frame_number) {
8260 pendingRequest.focusStateValid = true;
8261 pendingRequest.focusState = fwk_AfState;
8262 } else if (pendingRequest.frame_number == frame_number) {
8263 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8264 // Check if early AF state for trigger exists. If yes, send AF state as
8265 // partial result for better latency.
8266 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8267 pendingRequest.focusStateSent = true;
8268 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8269 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8270 frame_number, fwkEarlyAfState);
8271 }
8272 }
8273 }
8274 }
8275 }
8276 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8277 &mAfTrigger.trigger, 1);
8278 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8279
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008280 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8281 /*af regions*/
8282 int32_t afRegions[REGIONS_TUPLE_COUNT];
8283 // Adjust crop region from sensor output coordinate system to active
8284 // array coordinate system.
8285 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8286 hAfRegions->rect.width, hAfRegions->rect.height);
8287
8288 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8289 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8290 REGIONS_TUPLE_COUNT);
8291 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8292 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8293 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8294 hAfRegions->rect.height);
8295 }
8296
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008297 // AF region confidence
8298 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8299 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8300 }
8301
Thierry Strudel3d639192016-09-09 11:52:26 -07008302 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8303 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8304 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8305 if (NAME_NOT_FOUND != val) {
8306 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8307 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8308 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8309 } else {
8310 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8311 }
8312 }
8313
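    // Derive ANDROID_CONTROL_AE_MODE from the HAL's redeye, LED flash and AEC
    // settings: redeye reduction takes precedence, then auto/on flash modes,
    // then plain AE on/off, and finally external flash.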
8314 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8315 uint32_t aeMode = CAM_AE_MODE_MAX;
8316 int32_t flashMode = CAM_FLASH_MODE_MAX;
8317 int32_t redeye = -1;
8318 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8319 aeMode = *pAeMode;
8320 }
8321 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8322 flashMode = *pFlashMode;
8323 }
8324 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8325 redeye = *pRedeye;
8326 }
8327
8328 if (1 == redeye) {
8329 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8330 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8331 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8332 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8333 flashMode);
8334 if (NAME_NOT_FOUND != val) {
8335 fwk_aeMode = (uint8_t)val;
8336 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8337 } else {
8338 LOGE("Unsupported flash mode %d", flashMode);
8339 }
8340 } else if (aeMode == CAM_AE_MODE_ON) {
8341 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8342 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8343 } else if (aeMode == CAM_AE_MODE_OFF) {
8344 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8345 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008346 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8347 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8348 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008349 } else {
8350 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8351 "flashMode:%d, aeMode:%u!!!",
8352 redeye, flashMode, aeMode);
8353 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008354 if (mInstantAEC) {
8355        // Increment frame index count until a bound is reached for instant AEC.
8356 mInstantAecFrameIdxCount++;
8357 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8358 CAM_INTF_META_AEC_INFO, metadata) {
8359 LOGH("ae_params->settled = %d",ae_params->settled);
8360            // If AEC has settled, or the frame count has reached the bound,
8361            // reset instant AEC.
8362 if (ae_params->settled ||
8363 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8364 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8365 mInstantAEC = false;
8366 mResetInstantAEC = true;
8367 mInstantAecFrameIdxCount = 0;
8368 }
8369 }
8370 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008371 resultMetadata = camMetadata.release();
8372 return resultMetadata;
8373}
8374
8375/*===========================================================================
8376 * FUNCTION : dumpMetadataToFile
8377 *
8378 * DESCRIPTION: Dumps tuning metadata to file system
8379 *
8380 * PARAMETERS :
8381 * @meta : tuning metadata
8382 * @dumpFrameCount : current dump frame count
8383 * @enabled : Enable mask
8384 *
8385 *==========================================================================*/
8386void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8387 uint32_t &dumpFrameCount,
8388 bool enabled,
8389 const char *type,
8390 uint32_t frameNumber)
8391{
8392 //Some sanity checks
8393 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8394 LOGE("Tuning sensor data size bigger than expected %d: %d",
8395 meta.tuning_sensor_data_size,
8396 TUNING_SENSOR_DATA_MAX);
8397 return;
8398 }
8399
8400 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8401 LOGE("Tuning VFE data size bigger than expected %d: %d",
8402 meta.tuning_vfe_data_size,
8403 TUNING_VFE_DATA_MAX);
8404 return;
8405 }
8406
8407 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8408 LOGE("Tuning CPP data size bigger than expected %d: %d",
8409 meta.tuning_cpp_data_size,
8410 TUNING_CPP_DATA_MAX);
8411 return;
8412 }
8413
8414 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8415 LOGE("Tuning CAC data size bigger than expected %d: %d",
8416 meta.tuning_cac_data_size,
8417 TUNING_CAC_DATA_MAX);
8418 return;
8419 }
8420 //
8421
8422 if(enabled){
8423 char timeBuf[FILENAME_MAX];
8424 char buf[FILENAME_MAX];
8425 memset(buf, 0, sizeof(buf));
8426 memset(timeBuf, 0, sizeof(timeBuf));
8427 time_t current_time;
8428 struct tm * timeinfo;
8429 time (&current_time);
8430 timeinfo = localtime (&current_time);
8431 if (timeinfo != NULL) {
8432 strftime (timeBuf, sizeof(timeBuf),
8433 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8434 }
8435 String8 filePath(timeBuf);
8436 snprintf(buf,
8437 sizeof(buf),
8438 "%dm_%s_%d.bin",
8439 dumpFrameCount,
8440 type,
8441 frameNumber);
8442 filePath.append(buf);
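        // Resulting dump path:
        //   QCAMERA_DUMP_FRM_LOCATION<YYYYmmddHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin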
8443 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8444 if (file_fd >= 0) {
8445 ssize_t written_len = 0;
8446 meta.tuning_data_version = TUNING_DATA_VERSION;
8447 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8448 written_len += write(file_fd, data, sizeof(uint32_t));
8449 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8450 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8451 written_len += write(file_fd, data, sizeof(uint32_t));
8452 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8453 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8454 written_len += write(file_fd, data, sizeof(uint32_t));
8455 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8456 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8457 written_len += write(file_fd, data, sizeof(uint32_t));
8458 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8459 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8460 written_len += write(file_fd, data, sizeof(uint32_t));
8461 meta.tuning_mod3_data_size = 0;
8462 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8463 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8464 written_len += write(file_fd, data, sizeof(uint32_t));
8465 size_t total_size = meta.tuning_sensor_data_size;
8466 data = (void *)((uint8_t *)&meta.data);
8467 written_len += write(file_fd, data, total_size);
8468 total_size = meta.tuning_vfe_data_size;
8469 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8470 written_len += write(file_fd, data, total_size);
8471 total_size = meta.tuning_cpp_data_size;
8472 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8473 written_len += write(file_fd, data, total_size);
8474 total_size = meta.tuning_cac_data_size;
8475 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8476 written_len += write(file_fd, data, total_size);
8477 close(file_fd);
8478 }else {
8479 LOGE("fail to open file for metadata dumping");
8480 }
8481 }
8482}
8483
8484/*===========================================================================
8485 * FUNCTION : cleanAndSortStreamInfo
8486 *
8487 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8488 * and sort them such that raw stream is at the end of the list
8489 *              and sort them such that raw streams are at the end of the list.
8490 *              This is a workaround for a camera daemon constraint.
8491 * PARAMETERS : None
8492 *
8493 *==========================================================================*/
8494void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8495{
8496 List<stream_info_t *> newStreamInfo;
8497
8498 /*clean up invalid streams*/
8499 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8500 it != mStreamInfo.end();) {
8501 if(((*it)->status) == INVALID){
8502 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8503 delete channel;
8504 free(*it);
8505 it = mStreamInfo.erase(it);
8506 } else {
8507 it++;
8508 }
8509 }
8510
8511 // Move preview/video/callback/snapshot streams into newList
8512 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8513 it != mStreamInfo.end();) {
8514 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8515 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8516 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8517 newStreamInfo.push_back(*it);
8518 it = mStreamInfo.erase(it);
8519 } else
8520 it++;
8521 }
8522 // Move raw streams into newList
8523 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8524 it != mStreamInfo.end();) {
8525 newStreamInfo.push_back(*it);
8526 it = mStreamInfo.erase(it);
8527 }
8528
8529 mStreamInfo = newStreamInfo;
8530}
8531
8532/*===========================================================================
8533 * FUNCTION : extractJpegMetadata
8534 *
8535 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8536 * JPEG metadata is cached in HAL, and return as part of capture
8537 * result when metadata is returned from camera daemon.
8538 *
8539 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8540 * @request: capture request
8541 *
8542 *==========================================================================*/
8543void QCamera3HardwareInterface::extractJpegMetadata(
8544 CameraMetadata& jpegMetadata,
8545 const camera3_capture_request_t *request)
8546{
8547 CameraMetadata frame_settings;
8548 frame_settings = request->settings;
8549
8550 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8551 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8552 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8553 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8554
8555 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8556 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8557 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8558 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8559
8560 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8561 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8562 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8563 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8564
8565 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8566 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8567 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8568 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8569
8570 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8571 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8572 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8573 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8574
8575 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8576 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8577 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8578 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8579
8580 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8581 int32_t thumbnail_size[2];
8582 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8583 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8584 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8585 int32_t orientation =
8586 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008587 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008588 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8589 int32_t temp;
8590 temp = thumbnail_size[0];
8591 thumbnail_size[0] = thumbnail_size[1];
8592 thumbnail_size[1] = temp;
8593 }
8594 }
8595 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8596 thumbnail_size,
8597 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8598 }
8599
8600}
8601
8602/*===========================================================================
8603 * FUNCTION : convertToRegions
8604 *
8605 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8606 *
8607 * PARAMETERS :
8608 * @rect : cam_rect_t struct to convert
8609 * @region : int32_t destination array
8610 * @weight : if we are converting from cam_area_t, weight is valid
8611 * else weight = -1
8612 *
8613 *==========================================================================*/
8614void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8615 int32_t *region, int weight)
8616{
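    // Illustrative example, assuming the FACE_* indices follow the framework's
    // [left, top, right, bottom, weight] order: a rect {left=100, top=50,
    // width=200, height=100} with weight 1 becomes {100, 50, 300, 150, 1}.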
Jason Lee8ce36fa2017-04-19 19:40:37 -07008617 region[FACE_LEFT] = rect.left;
8618 region[FACE_TOP] = rect.top;
8619 region[FACE_RIGHT] = rect.left + rect.width;
8620 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008621 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008622 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008623 }
8624}
8625
8626/*===========================================================================
8627 * FUNCTION : convertFromRegions
8628 *
8629 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8630 *
8631 * PARAMETERS :
8632 *   @roi            : cam_area_t destination to fill
8633 *   @frame_settings : capture request settings holding the region tag
8634 *   @tag            : metadata tag whose int32 array is the region tuple
8635 *                     [xMin, yMin, xMax, yMax, weight]
8636 *
8637 *==========================================================================*/
8638void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008639 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008640{
Thierry Strudel3d639192016-09-09 11:52:26 -07008641 int32_t x_min = frame_settings.find(tag).data.i32[0];
8642 int32_t y_min = frame_settings.find(tag).data.i32[1];
8643 int32_t x_max = frame_settings.find(tag).data.i32[2];
8644 int32_t y_max = frame_settings.find(tag).data.i32[3];
8645 roi.weight = frame_settings.find(tag).data.i32[4];
8646 roi.rect.left = x_min;
8647 roi.rect.top = y_min;
8648 roi.rect.width = x_max - x_min;
8649 roi.rect.height = y_max - y_min;
8650}
8651
8652/*===========================================================================
8653 * FUNCTION : resetIfNeededROI
8654 *
8655 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8656 * crop region
8657 *
8658 * PARAMETERS :
8659 * @roi : cam_area_t struct to resize
8660 * @scalerCropRegion : cam_crop_region_t region to compare against
8661 *
8662 *
8663 *==========================================================================*/
8664bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8665 const cam_crop_region_t* scalerCropRegion)
8666{
8667 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8668 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8669 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8670 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8671
8672    /* According to the spec, weight = 0 indicates the ROI should be disabled.
8673     * Without this check, the calculations below that validate whether the ROI
8674     * lies inside the scaler crop region would fail, so the ROI would not be
8675     * reset and the algorithm would keep using a stale ROI window.
8676     */
8677 if (roi->weight == 0) {
8678 return true;
8679 }
8680
8681 if ((roi_x_max < scalerCropRegion->left) ||
8682            // right edge of roi window is left of scaler crop's left edge
8683            (roi_y_max < scalerCropRegion->top) ||
8684            // bottom edge of roi window is above scaler crop's top edge
8685            (roi->rect.left > crop_x_max) ||
8686            // left edge of roi window is beyond (right of) scaler crop's right edge
8687            (roi->rect.top > crop_y_max)){
8688            // top edge of roi window is below scaler crop's bottom edge
8689 return false;
8690 }
8691 if (roi->rect.left < scalerCropRegion->left) {
8692 roi->rect.left = scalerCropRegion->left;
8693 }
8694 if (roi->rect.top < scalerCropRegion->top) {
8695 roi->rect.top = scalerCropRegion->top;
8696 }
8697 if (roi_x_max > crop_x_max) {
8698 roi_x_max = crop_x_max;
8699 }
8700 if (roi_y_max > crop_y_max) {
8701 roi_y_max = crop_y_max;
8702 }
8703 roi->rect.width = roi_x_max - roi->rect.left;
8704 roi->rect.height = roi_y_max - roi->rect.top;
8705 return true;
8706}
8707
8708/*===========================================================================
8709 * FUNCTION : convertLandmarks
8710 *
8711 * DESCRIPTION: helper method to extract the landmarks from face detection info
8712 *
8713 * PARAMETERS :
8714 * @landmark_data : input landmark data to be converted
8715 * @landmarks : int32_t destination array
8716 *
8717 *
8718 *==========================================================================*/
8719void QCamera3HardwareInterface::convertLandmarks(
8720 cam_face_landmarks_info_t landmark_data,
8721 int32_t *landmarks)
8722{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008723 if (landmark_data.is_left_eye_valid) {
8724 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8725 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8726 } else {
8727 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8728 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8729 }
8730
8731 if (landmark_data.is_right_eye_valid) {
8732 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8733 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8734 } else {
8735 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8736 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8737 }
8738
8739 if (landmark_data.is_mouth_valid) {
8740 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8741 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8742 } else {
8743 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8744 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8745 }
8746}
8747
8748/*===========================================================================
8749 * FUNCTION : setInvalidLandmarks
8750 *
8751 * DESCRIPTION: helper method to set invalid landmarks
8752 *
8753 * PARAMETERS :
8754 * @landmarks : int32_t destination array
8755 *
8756 *
8757 *==========================================================================*/
8758void QCamera3HardwareInterface::setInvalidLandmarks(
8759 int32_t *landmarks)
8760{
8761 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8762 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8763 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8764 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8765 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8766 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008767}
8768
8769#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008770
8771/*===========================================================================
8772 * FUNCTION : getCapabilities
8773 *
8774 * DESCRIPTION: query camera capability from back-end
8775 *
8776 * PARAMETERS :
8777 * @ops : mm-interface ops structure
8778 * @cam_handle : camera handle for which we need capability
8779 *
8780 * RETURN : ptr type of capability structure
8781 * capability for success
8782 * NULL for failure
8783 *==========================================================================*/
8784cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8785 uint32_t cam_handle)
8786{
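    // Flow: allocate a capability heap buffer, map it to the backend, let the
    // backend fill it via query_capability(), copy the result into a malloc'd
    // cam_capability_t, then unmap and release the temporary heap buffer.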
8787 int rc = NO_ERROR;
8788 QCamera3HeapMemory *capabilityHeap = NULL;
8789 cam_capability_t *cap_ptr = NULL;
8790
8791 if (ops == NULL) {
8792 LOGE("Invalid arguments");
8793 return NULL;
8794 }
8795
8796 capabilityHeap = new QCamera3HeapMemory(1);
8797 if (capabilityHeap == NULL) {
8798 LOGE("creation of capabilityHeap failed");
8799 return NULL;
8800 }
8801
8802 /* Allocate memory for capability buffer */
8803 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8804 if(rc != OK) {
8805        LOGE("No memory for capability");
8806 goto allocate_failed;
8807 }
8808
8809 /* Map memory for capability buffer */
8810 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8811
8812 rc = ops->map_buf(cam_handle,
8813 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8814 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8815 if(rc < 0) {
8816 LOGE("failed to map capability buffer");
8817 rc = FAILED_TRANSACTION;
8818 goto map_failed;
8819 }
8820
8821 /* Query Capability */
8822 rc = ops->query_capability(cam_handle);
8823 if(rc < 0) {
8824 LOGE("failed to query capability");
8825 rc = FAILED_TRANSACTION;
8826 goto query_failed;
8827 }
8828
8829 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8830 if (cap_ptr == NULL) {
8831 LOGE("out of memory");
8832 rc = NO_MEMORY;
8833 goto query_failed;
8834 }
8835
8836 memset(cap_ptr, 0, sizeof(cam_capability_t));
8837 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8838
8839 int index;
8840 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8841 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8842 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8843 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8844 }
8845
8846query_failed:
8847 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8848map_failed:
8849 capabilityHeap->deallocate();
8850allocate_failed:
8851 delete capabilityHeap;
8852
8853 if (rc != NO_ERROR) {
8854 return NULL;
8855 } else {
8856 return cap_ptr;
8857 }
8858}
8859
Thierry Strudel3d639192016-09-09 11:52:26 -07008860/*===========================================================================
8861 * FUNCTION : initCapabilities
8862 *
8863 * DESCRIPTION: initialize camera capabilities in static data struct
8864 *
8865 * PARAMETERS :
8866 * @cameraId : camera Id
8867 *
8868 * RETURN : int32_t type of status
8869 * NO_ERROR -- success
8870 *              non-zero failure code
8871 *==========================================================================*/
8872int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8873{
8874 int rc = 0;
8875 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008876 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008877
8878 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8879 if (rc) {
8880 LOGE("camera_open failed. rc = %d", rc);
8881 goto open_failed;
8882 }
8883 if (!cameraHandle) {
8884 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8885 goto open_failed;
8886 }
8887
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008888 handle = get_main_camera_handle(cameraHandle->camera_handle);
8889 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8890 if (gCamCapability[cameraId] == NULL) {
8891 rc = FAILED_TRANSACTION;
8892 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008893 }
8894
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008895 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008896 if (is_dual_camera_by_idx(cameraId)) {
8897 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8898 gCamCapability[cameraId]->aux_cam_cap =
8899 getCapabilities(cameraHandle->ops, handle);
8900 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8901 rc = FAILED_TRANSACTION;
8902 free(gCamCapability[cameraId]);
8903 goto failed_op;
8904 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008905
8906 // Copy the main camera capability to main_cam_cap struct
8907 gCamCapability[cameraId]->main_cam_cap =
8908 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8909 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8910 LOGE("out of memory");
8911 rc = NO_MEMORY;
8912 goto failed_op;
8913 }
8914 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8915 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008916 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008917failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008918 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8919 cameraHandle = NULL;
8920open_failed:
8921 return rc;
8922}
8923
8924/*==========================================================================
8925 * FUNCTION   : get3AVersion
8926 *
8927 * DESCRIPTION: get the Q3A S/W version
8928 *
8929 * PARAMETERS :
8930 * @sw_version: Reference of Q3A structure which will hold version info upon
8931 * return
8932 *
8933 * RETURN : None
8934 *
8935 *==========================================================================*/
8936void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8937{
8938 if(gCamCapability[mCameraId])
8939 sw_version = gCamCapability[mCameraId]->q3a_version;
8940 else
8941 LOGE("Capability structure NULL!");
8942}
8943
8944
8945/*===========================================================================
8946 * FUNCTION : initParameters
8947 *
8948 * DESCRIPTION: initialize camera parameters
8949 *
8950 * PARAMETERS :
8951 *
8952 * RETURN : int32_t type of status
8953 * NO_ERROR -- success
8954 *              non-zero failure code
8955 *==========================================================================*/
8956int QCamera3HardwareInterface::initParameters()
8957{
8958 int rc = 0;
8959
8960 //Allocate Set Param Buffer
8961 mParamHeap = new QCamera3HeapMemory(1);
8962 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8963 if(rc != OK) {
8964 rc = NO_MEMORY;
8965 LOGE("Failed to allocate SETPARM Heap memory");
8966 delete mParamHeap;
8967 mParamHeap = NULL;
8968 return rc;
8969 }
8970
8971 //Map memory for parameters buffer
8972 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8973 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8974 mParamHeap->getFd(0),
8975 sizeof(metadata_buffer_t),
8976 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8977 if(rc < 0) {
8978 LOGE("failed to map SETPARM buffer");
8979 rc = FAILED_TRANSACTION;
8980 mParamHeap->deallocate();
8981 delete mParamHeap;
8982 mParamHeap = NULL;
8983 return rc;
8984 }
8985
8986 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8987
8988 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8989 return rc;
8990}
8991
8992/*===========================================================================
8993 * FUNCTION : deinitParameters
8994 *
8995 * DESCRIPTION: de-initialize camera parameters
8996 *
8997 * PARAMETERS :
8998 *
8999 * RETURN : NONE
9000 *==========================================================================*/
9001void QCamera3HardwareInterface::deinitParameters()
9002{
9003 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9004 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9005
9006 mParamHeap->deallocate();
9007 delete mParamHeap;
9008 mParamHeap = NULL;
9009
9010 mParameters = NULL;
9011
9012 free(mPrevParameters);
9013 mPrevParameters = NULL;
9014}
9015
9016/*===========================================================================
9017 * FUNCTION : calcMaxJpegSize
9018 *
9019 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9020 *
9021 * PARAMETERS :
9022 *
9023 * RETURN : max_jpeg_size
9024 *==========================================================================*/
9025size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9026{
9027 size_t max_jpeg_size = 0;
9028 size_t temp_width, temp_height;
9029 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9030 MAX_SIZES_CNT);
9031 for (size_t i = 0; i < count; i++) {
9032 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9033 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9034 if (temp_width * temp_height > max_jpeg_size ) {
9035 max_jpeg_size = temp_width * temp_height;
9036 }
9037 }
9038 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9039 return max_jpeg_size;
9040}
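/* Illustrative sizing note (values hypothetical, not taken from any capability
 * table): for a sensor whose largest picture size is 4160x3120, the value
 * returned here and later advertised as ANDROID_JPEG_MAX_SIZE would be
 *     4160 * 3120 * 3 / 2 + sizeof(camera3_jpeg_blob_t)   // ~19.5 MB
 * i.e. 1.5 bytes of headroom per pixel for poorly compressible scenes, plus
 * room for the camera3_jpeg_blob_t footer appended at the end of the buffer.
 */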
9041
9042/*===========================================================================
9043 * FUNCTION : getMaxRawSize
9044 *
9045 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9046 *
9047 * PARAMETERS :
9048 * @camera_id : camera Id
9049 * RETURN : Largest supported Raw Dimension
9050 *==========================================================================*/
9051cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9052{
9053 int max_width = 0;
9054 cam_dimension_t maxRawSize;
9055
9056 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9057 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9058 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9059 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9060 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9061 }
9062 }
9063 return maxRawSize;
9064}
9065
9066
9067/*===========================================================================
9068 * FUNCTION : calcMaxJpegDim
9069 *
9070 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9071 *
9072 * PARAMETERS :
9073 *
9074 * RETURN : max_jpeg_dim
9075 *==========================================================================*/
9076cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9077{
9078 cam_dimension_t max_jpeg_dim;
9079 cam_dimension_t curr_jpeg_dim;
9080 max_jpeg_dim.width = 0;
9081 max_jpeg_dim.height = 0;
9082 curr_jpeg_dim.width = 0;
9083 curr_jpeg_dim.height = 0;
9084 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9085 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9086 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9087 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9088 max_jpeg_dim.width * max_jpeg_dim.height ) {
9089 max_jpeg_dim.width = curr_jpeg_dim.width;
9090 max_jpeg_dim.height = curr_jpeg_dim.height;
9091 }
9092 }
9093 return max_jpeg_dim;
9094}
9095
9096/*===========================================================================
9097 * FUNCTION : addStreamConfig
9098 *
9099 * DESCRIPTION: adds the stream configuration to the array
9100 *
9101 * PARAMETERS :
9102 * @available_stream_configs : pointer to stream configuration array
9103 * @scalar_format : scalar format
9104 * @dim : configuration dimension
9105 * @config_type : input or output configuration type
9106 *
9107 * RETURN : NONE
9108 *==========================================================================*/
9109void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9110 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9111{
9112 available_stream_configs.add(scalar_format);
9113 available_stream_configs.add(dim.width);
9114 available_stream_configs.add(dim.height);
9115 available_stream_configs.add(config_type);
9116}
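/* Each call to addStreamConfig() appends one flattened 4-tuple
 * (format, width, height, direction) to the vector backing
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS. For example, a hypothetical
 * 1920x1080 YCbCr_420_888 output entry would be recorded as the four int32_t
 * values {HAL_PIXEL_FORMAT_YCbCr_420_888, 1920, 1080,
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}, which the framework
 * parses back four entries at a time.
 */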
9117
9118/*===========================================================================
9119 * FUNCTION : supportBurstCapture
9120 *
9121 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9122 *
9123 * PARAMETERS :
9124 * @cameraId : camera Id
9125 *
9126 * RETURN : true if camera supports BURST_CAPTURE
9127 * false otherwise
9128 *==========================================================================*/
9129bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9130{
9131 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9132 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9133 const int32_t highResWidth = 3264;
9134 const int32_t highResHeight = 2448;
9135
9136 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9137 // Maximum resolution images cannot be captured at >= 10fps
9138 // -> not supporting BURST_CAPTURE
9139 return false;
9140 }
9141
9142 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9143 // Maximum resolution images can be captured at >= 20fps
9144 // --> supporting BURST_CAPTURE
9145 return true;
9146 }
9147
9148 // Find the smallest highRes resolution, or largest resolution if there is none
9149 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9150 MAX_SIZES_CNT);
9151 size_t highRes = 0;
9152 while ((highRes + 1 < totalCnt) &&
9153 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9154 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9155 highResWidth * highResHeight)) {
9156 highRes++;
9157 }
9158 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9159 return true;
9160 } else {
9161 return false;
9162 }
9163}
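/* Note on the search above: picture_sizes_tbl is assumed to be sorted from
 * largest to smallest, so the loop stops at the smallest entry that is still
 * at least 8MP-class (3264x2448). BURST_CAPTURE is then advertised only if
 * that size can sustain roughly 20fps (min duration <= 50ms). For example, a
 * hypothetical sensor whose full-size capture runs at 12fps but whose 8MP mode
 * runs at 24fps would still qualify.
 */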
9164
9165/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009166 * FUNCTION : getPDStatIndex
9167 *
9168 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9169 *
9170 * PARAMETERS :
9171 * @caps : camera capabilities
9172 *
9173 * RETURN : int32_t type
9174 * non-negative - on success
9175 * -1 - on failure
9176 *==========================================================================*/
9177int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9178 if (nullptr == caps) {
9179 return -1;
9180 }
9181
9182 uint32_t metaRawCount = caps->meta_raw_channel_count;
9183 int32_t ret = -1;
9184 for (size_t i = 0; i < metaRawCount; i++) {
9185 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9186 ret = i;
9187 break;
9188 }
9189 }
9190
9191 return ret;
9192}
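/* The index returned above selects the matching entry in the capability's
 * meta raw arrays (e.g. raw_meta_dim[] used in initStaticMetadata() below),
 * letting callers look up the PDAF statistics buffer dimensions for that
 * sub-format. A negative return value means the backend reported no PDAF
 * statistics channel.
 */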
9193
9194/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009195 * FUNCTION : initStaticMetadata
9196 *
9197 * DESCRIPTION: initialize the static metadata
9198 *
9199 * PARAMETERS :
9200 * @cameraId : camera Id
9201 *
9202 * RETURN : int32_t type of status
9203 * 0 -- success
9204 * non-zero failure code
9205 *==========================================================================*/
9206int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9207{
9208 int rc = 0;
9209 CameraMetadata staticInfo;
9210 size_t count = 0;
9211 bool limitedDevice = false;
9212 char prop[PROPERTY_VALUE_MAX];
9213 bool supportBurst = false;
9214
9215 supportBurst = supportBurstCapture(cameraId);
9216
9217 /* If the sensor is a YUV or mono sensor (no raw support), if per-frame control
9218 * is not guaranteed, or if the min fps at max resolution is less than 20 fps,
9219 * the device is advertised as a LIMITED device */
9220 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9221 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9222 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9223 !supportBurst;
9224
9225 uint8_t supportedHwLvl = limitedDevice ?
9226 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009227#ifndef USE_HAL_3_3
9228 // LEVEL_3 - This device will support level 3.
9229 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9230#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009231 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009232#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009233
9234 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9235 &supportedHwLvl, 1);
9236
9237 bool facingBack = false;
9238 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9239 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9240 facingBack = true;
9241 }
9242 /*HAL 3 only*/
9243 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9244 &gCamCapability[cameraId]->min_focus_distance, 1);
9245
9246 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9247 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9248
9249 /*should be using focal lengths but sensor doesn't provide that info now*/
9250 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9251 &gCamCapability[cameraId]->focal_length,
9252 1);
9253
9254 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9255 gCamCapability[cameraId]->apertures,
9256 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9257
9258 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9259 gCamCapability[cameraId]->filter_densities,
9260 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9261
9262
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009263 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9264 size_t mode_count =
9265 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9266 for (size_t i = 0; i < mode_count; i++) {
9267 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9268 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009269 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009270 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009271
9272 int32_t lens_shading_map_size[] = {
9273 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9274 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9275 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9276 lens_shading_map_size,
9277 sizeof(lens_shading_map_size)/sizeof(int32_t));
9278
9279 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9280 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9281
9282 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9283 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9284
9285 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9286 &gCamCapability[cameraId]->max_frame_duration, 1);
9287
9288 camera_metadata_rational baseGainFactor = {
9289 gCamCapability[cameraId]->base_gain_factor.numerator,
9290 gCamCapability[cameraId]->base_gain_factor.denominator};
9291 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9292 &baseGainFactor, 1);
9293
9294 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9295 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9296
9297 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9298 gCamCapability[cameraId]->pixel_array_size.height};
9299 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9300 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9301
9302 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9303 gCamCapability[cameraId]->active_array_size.top,
9304 gCamCapability[cameraId]->active_array_size.width,
9305 gCamCapability[cameraId]->active_array_size.height};
9306 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9307 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9308
9309 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9310 &gCamCapability[cameraId]->white_level, 1);
9311
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009312 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9313 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9314 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009315 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009316 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009317
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009318#ifndef USE_HAL_3_3
9319 bool hasBlackRegions = false;
9320 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9321 LOGW("black_region_count: %d is bounded to %d",
9322 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9323 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9324 }
9325 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9326 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9327 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9328 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9329 }
9330 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9331 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9332 hasBlackRegions = true;
9333 }
9334#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009335 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9336 &gCamCapability[cameraId]->flash_charge_duration, 1);
9337
9338 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9339 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9340
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009341 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9342 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9343 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009344 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9345 &timestampSource, 1);
9346
Thierry Strudel54dc9782017-02-15 12:12:10 -08009347 //update histogram vendor data
9348 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009349 &gCamCapability[cameraId]->histogram_size, 1);
9350
Thierry Strudel54dc9782017-02-15 12:12:10 -08009351 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009352 &gCamCapability[cameraId]->max_histogram_count, 1);
9353
Shuzhen Wang14415f52016-11-16 18:26:18 -08009354 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9355 //so that the app can request a smaller number of bins than the maximum supported.
9356 std::vector<int32_t> histBins;
9357 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9358 histBins.push_back(maxHistBins);
9359 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9360 (maxHistBins & 0x1) == 0) {
9361 histBins.push_back(maxHistBins >> 1);
9362 maxHistBins >>= 1;
9363 }
9364 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9365 histBins.data(), histBins.size());
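    /* Example with hypothetical values: if max_histogram_count is 256 and
     * MIN_CAM_HISTOGRAM_STATS_SIZE is 64, the advertised list is {256, 128, 64};
     * the halving stops as soon as the next count would be odd or would drop
     * below the minimum supported size.
     */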
9366
Thierry Strudel3d639192016-09-09 11:52:26 -07009367 int32_t sharpness_map_size[] = {
9368 gCamCapability[cameraId]->sharpness_map_size.width,
9369 gCamCapability[cameraId]->sharpness_map_size.height};
9370
9371 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9372 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9373
9374 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9375 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9376
Emilian Peev0f3c3162017-03-15 12:57:46 +00009377 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9378 if (0 <= indexPD) {
9379 // Advertise PD stats data as part of the Depth capabilities
9380 int32_t depthWidth =
9381 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9382 int32_t depthHeight =
9383 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009384 int32_t depthStride =
9385 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009386 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9387 assert(0 < depthSamplesCount);
9388 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9389 &depthSamplesCount, 1);
9390
9391 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9392 depthHeight,
9393 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9394 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9395 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9396 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9397 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9398
9399 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9400 depthHeight, 33333333,
9401 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9402 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9403 depthMinDuration,
9404 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9405
9406 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9407 depthHeight, 0,
9408 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9409 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9410 depthStallDuration,
9411 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9412
9413 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9414 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009415
9416 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9417 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9418 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009419 }
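    /* Summary of the block above: PD statistics are exported in two shapes, a
     * RAW16 stream of depthWidth x depthHeight and a BLOB stream of
     * depthSamplesCount x 1, both output-only, each with a 33333333ns (~30fps)
     * minimum frame duration and zero stall. The depthSamplesCount derivation
     * ((w * h * 2) / 16) presumably divides the 2-bytes-per-pixel PD buffer by
     * 16 bytes per depth point-cloud sample; the exact packing is vendor
     * specific.
     */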
9420
Thierry Strudel3d639192016-09-09 11:52:26 -07009421 int32_t scalar_formats[] = {
9422 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9423 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9424 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9425 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9426 HAL_PIXEL_FORMAT_RAW10,
9427 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009428 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9429 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9430 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009431
9432 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9433 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9434 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9435 count, MAX_SIZES_CNT, available_processed_sizes);
9436 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9437 available_processed_sizes, count * 2);
9438
9439 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9440 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9441 makeTable(gCamCapability[cameraId]->raw_dim,
9442 count, MAX_SIZES_CNT, available_raw_sizes);
9443 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9444 available_raw_sizes, count * 2);
9445
9446 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9447 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9448 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9449 count, MAX_SIZES_CNT, available_fps_ranges);
9450 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9451 available_fps_ranges, count * 2);
9452
9453 camera_metadata_rational exposureCompensationStep = {
9454 gCamCapability[cameraId]->exp_compensation_step.numerator,
9455 gCamCapability[cameraId]->exp_compensation_step.denominator};
9456 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9457 &exposureCompensationStep, 1);
9458
9459 Vector<uint8_t> availableVstabModes;
9460 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9461 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009462 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009463 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009464 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009465 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009466 count = IS_TYPE_MAX;
9467 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9468 for (size_t i = 0; i < count; i++) {
9469 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9470 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9471 eisSupported = true;
9472 break;
9473 }
9474 }
9475 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009476 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9477 }
9478 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9479 availableVstabModes.array(), availableVstabModes.size());
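    /* ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON is advertised only for back
     * cameras that report EIS 2.0/3.0 support and only while the
     * persist.camera.eis.enable property is non-zero, e.g.
     *     adb shell setprop persist.camera.eis.enable 0
     * hides video stabilization the next time static metadata is generated
     * (typically after a camera server restart).
     */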
9480
9481 /*HAL 1 and HAL 3 common*/
9482 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9483 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9484 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009485 // Cap the max zoom to the max preferred value
9486 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009487 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9488 &maxZoom, 1);
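    /* zoom_ratio_tbl entries are zoom factors in percent (minZoomStep = 100 per
     * the HAL1/API1 convention), so a hypothetical table ending in 800 yields a
     * maxZoom of 8.0 before the MIN() against MAX_PREFERRED_ZOOM_RATIO. Note
     * that maxZoomStep / minZoomStep is an integer division performed before
     * the conversion to float.
     */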
9489
9490 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9491 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9492
9493 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9494 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9495 max3aRegions[2] = 0; /* AF not supported */
9496 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9497 max3aRegions, 3);
9498
9499 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9500 memset(prop, 0, sizeof(prop));
9501 property_get("persist.camera.facedetect", prop, "1");
9502 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9503 LOGD("Support face detection mode: %d",
9504 supportedFaceDetectMode);
9505
9506 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009507 /* supported mode should be OFF if the max number of faces is 0 */
9508 if (maxFaces <= 0) {
9509 supportedFaceDetectMode = 0;
9510 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009511 Vector<uint8_t> availableFaceDetectModes;
9512 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9513 if (supportedFaceDetectMode == 1) {
9514 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9515 } else if (supportedFaceDetectMode == 2) {
9516 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9517 } else if (supportedFaceDetectMode == 3) {
9518 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9519 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9520 } else {
9521 maxFaces = 0;
9522 }
9523 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9524 availableFaceDetectModes.array(),
9525 availableFaceDetectModes.size());
9526 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9527 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009528 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9529 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9530 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009531
9532 int32_t exposureCompensationRange[] = {
9533 gCamCapability[cameraId]->exposure_compensation_min,
9534 gCamCapability[cameraId]->exposure_compensation_max};
9535 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9536 exposureCompensationRange,
9537 sizeof(exposureCompensationRange)/sizeof(int32_t));
9538
9539 uint8_t lensFacing = (facingBack) ?
9540 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9541 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9542
9543 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9544 available_thumbnail_sizes,
9545 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9546
9547 /*all sizes will be combined into this tag*/
9548 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9549 /*android.scaler.availableStreamConfigurations*/
9550 Vector<int32_t> available_stream_configs;
9551 cam_dimension_t active_array_dim;
9552 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9553 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009554
9555 /*advertise the list of supported input dimensions based on the property below.
9556 By default all sizes up to 5MP will be advertised.
9557 Note that the setprop resolution format should be WxH.
9558 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9559 To list all supported sizes, setprop needs to be set with "0x0" */
9560 cam_dimension_t minInputSize = {2592,1944}; //5MP
9561 memset(prop, 0, sizeof(prop));
9562 property_get("persist.camera.input.minsize", prop, "2592x1944");
9563 if (strlen(prop) > 0) {
9564 char *saveptr = NULL;
9565 char *token = strtok_r(prop, "x", &saveptr);
9566 if (token != NULL) {
9567 minInputSize.width = atoi(token);
9568 }
9569 token = strtok_r(NULL, "x", &saveptr);
9570 if (token != NULL) {
9571 minInputSize.height = atoi(token);
9572 }
9573 }
9574
Thierry Strudel3d639192016-09-09 11:52:26 -07009575 /* Add input/output stream configurations for each scalar formats*/
9576 for (size_t j = 0; j < scalar_formats_count; j++) {
9577 switch (scalar_formats[j]) {
9578 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9579 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9580 case HAL_PIXEL_FORMAT_RAW10:
9581 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9582 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9583 addStreamConfig(available_stream_configs, scalar_formats[j],
9584 gCamCapability[cameraId]->raw_dim[i],
9585 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9586 }
9587 break;
9588 case HAL_PIXEL_FORMAT_BLOB:
9589 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9590 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9591 addStreamConfig(available_stream_configs, scalar_formats[j],
9592 gCamCapability[cameraId]->picture_sizes_tbl[i],
9593 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9594 }
9595 break;
9596 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9597 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9598 default:
9599 cam_dimension_t largest_picture_size;
9600 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9601 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9602 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9603 addStreamConfig(available_stream_configs, scalar_formats[j],
9604 gCamCapability[cameraId]->picture_sizes_tbl[i],
9605 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009606 /*For the below 2 formats we also support input streams for reprocessing; advertise those*/
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009607 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9608 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009609 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9610 >= minInputSize.width) || (gCamCapability[cameraId]->
9611 picture_sizes_tbl[i].height >= minInputSize.height)) {
9612 addStreamConfig(available_stream_configs, scalar_formats[j],
9613 gCamCapability[cameraId]->picture_sizes_tbl[i],
9614 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9615 }
9616 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009617 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009618
Thierry Strudel3d639192016-09-09 11:52:26 -07009619 break;
9620 }
9621 }
9622
9623 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9624 available_stream_configs.array(), available_stream_configs.size());
9625 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9626 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9627
9628 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9629 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9630
9631 /* android.scaler.availableMinFrameDurations */
9632 Vector<int64_t> available_min_durations;
9633 for (size_t j = 0; j < scalar_formats_count; j++) {
9634 switch (scalar_formats[j]) {
9635 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9636 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9637 case HAL_PIXEL_FORMAT_RAW10:
9638 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9639 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9640 available_min_durations.add(scalar_formats[j]);
9641 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9642 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9643 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9644 }
9645 break;
9646 default:
9647 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9648 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9649 available_min_durations.add(scalar_formats[j]);
9650 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9651 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9652 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9653 }
9654 break;
9655 }
9656 }
9657 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9658 available_min_durations.array(), available_min_durations.size());
9659
9660 Vector<int32_t> available_hfr_configs;
9661 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9662 int32_t fps = 0;
9663 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9664 case CAM_HFR_MODE_60FPS:
9665 fps = 60;
9666 break;
9667 case CAM_HFR_MODE_90FPS:
9668 fps = 90;
9669 break;
9670 case CAM_HFR_MODE_120FPS:
9671 fps = 120;
9672 break;
9673 case CAM_HFR_MODE_150FPS:
9674 fps = 150;
9675 break;
9676 case CAM_HFR_MODE_180FPS:
9677 fps = 180;
9678 break;
9679 case CAM_HFR_MODE_210FPS:
9680 fps = 210;
9681 break;
9682 case CAM_HFR_MODE_240FPS:
9683 fps = 240;
9684 break;
9685 case CAM_HFR_MODE_480FPS:
9686 fps = 480;
9687 break;
9688 case CAM_HFR_MODE_OFF:
9689 case CAM_HFR_MODE_MAX:
9690 default:
9691 break;
9692 }
9693
9694 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9695 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9696 /* For each HFR frame rate, we need to advertise one variable fps range
9697 * and one fixed fps range per dimension. E.g. for 120 FPS, advertise [30, 120]
9698 * and [120, 120]. While camcorder preview alone is running, [30, 120] is
9699 * set by the app. When video recording is started, [120, 120] is
9700 * set. This way the sensor configuration does not change when recording
9701 * is started */
9702
9703 /* (width, height, fps_min, fps_max, batch_size_max) */
9704 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9705 j < MAX_SIZES_CNT; j++) {
9706 available_hfr_configs.add(
9707 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9708 available_hfr_configs.add(
9709 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9710 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9711 available_hfr_configs.add(fps);
9712 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9713
9714 /* (width, height, fps_min, fps_max, batch_size_max) */
9715 available_hfr_configs.add(
9716 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9717 available_hfr_configs.add(
9718 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9719 available_hfr_configs.add(fps);
9720 available_hfr_configs.add(fps);
9721 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9722 }
9723 }
9724 }
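    /* Example of the 5-tuples appended above for a hypothetical 1920x1080 entry
     * in the 120fps HFR table, assuming PREVIEW_FPS_FOR_HFR is 30:
     *     (1920, 1080,  30, 120, 4)   variable-rate range for preview
     *     (1920, 1080, 120, 120, 4)   fixed-rate range for recording
     * where the last element, batch_size_max, is fps / PREVIEW_FPS_FOR_HFR.
     */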
9725 //Advertise HFR capability only if the property is set
9726 memset(prop, 0, sizeof(prop));
9727 property_get("persist.camera.hal3hfr.enable", prop, "1");
9728 uint8_t hfrEnable = (uint8_t)atoi(prop);
9729
9730 if(hfrEnable && available_hfr_configs.array()) {
9731 staticInfo.update(
9732 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9733 available_hfr_configs.array(), available_hfr_configs.size());
9734 }
9735
9736 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9737 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9738 &max_jpeg_size, 1);
9739
9740 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9741 size_t size = 0;
9742 count = CAM_EFFECT_MODE_MAX;
9743 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9744 for (size_t i = 0; i < count; i++) {
9745 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9746 gCamCapability[cameraId]->supported_effects[i]);
9747 if (NAME_NOT_FOUND != val) {
9748 avail_effects[size] = (uint8_t)val;
9749 size++;
9750 }
9751 }
9752 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9753 avail_effects,
9754 size);
9755
9756 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9757 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9758 size_t supported_scene_modes_cnt = 0;
9759 count = CAM_SCENE_MODE_MAX;
9760 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9761 for (size_t i = 0; i < count; i++) {
9762 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9763 CAM_SCENE_MODE_OFF) {
9764 int val = lookupFwkName(SCENE_MODES_MAP,
9765 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9766 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009767
Thierry Strudel3d639192016-09-09 11:52:26 -07009768 if (NAME_NOT_FOUND != val) {
9769 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9770 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9771 supported_scene_modes_cnt++;
9772 }
9773 }
9774 }
9775 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9776 avail_scene_modes,
9777 supported_scene_modes_cnt);
9778
9779 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9780 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9781 supported_scene_modes_cnt,
9782 CAM_SCENE_MODE_MAX,
9783 scene_mode_overrides,
9784 supported_indexes,
9785 cameraId);
9786
9787 if (supported_scene_modes_cnt == 0) {
9788 supported_scene_modes_cnt = 1;
9789 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9790 }
9791
9792 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9793 scene_mode_overrides, supported_scene_modes_cnt * 3);
9794
9795 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9796 ANDROID_CONTROL_MODE_AUTO,
9797 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9798 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9799 available_control_modes,
9800 3);
9801
9802 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9803 size = 0;
9804 count = CAM_ANTIBANDING_MODE_MAX;
9805 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9806 for (size_t i = 0; i < count; i++) {
9807 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9808 gCamCapability[cameraId]->supported_antibandings[i]);
9809 if (NAME_NOT_FOUND != val) {
9810 avail_antibanding_modes[size] = (uint8_t)val;
9811 size++;
9812 }
9813
9814 }
9815 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9816 avail_antibanding_modes,
9817 size);
9818
9819 uint8_t avail_abberation_modes[] = {
9820 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9821 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9822 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9823 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9824 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9825 if (0 == count) {
9826 // If no aberration correction modes are available for a device, advertise only the OFF mode
9827 size = 1;
9828 } else {
9829 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9830 // so advertise all 3 modes if at least one mode is supported, as per the
9831 // new M requirement
9832 size = 3;
9833 }
9834 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9835 avail_abberation_modes,
9836 size);
9837
9838 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9839 size = 0;
9840 count = CAM_FOCUS_MODE_MAX;
9841 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9842 for (size_t i = 0; i < count; i++) {
9843 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9844 gCamCapability[cameraId]->supported_focus_modes[i]);
9845 if (NAME_NOT_FOUND != val) {
9846 avail_af_modes[size] = (uint8_t)val;
9847 size++;
9848 }
9849 }
9850 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9851 avail_af_modes,
9852 size);
9853
9854 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9855 size = 0;
9856 count = CAM_WB_MODE_MAX;
9857 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9858 for (size_t i = 0; i < count; i++) {
9859 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9860 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9861 gCamCapability[cameraId]->supported_white_balances[i]);
9862 if (NAME_NOT_FOUND != val) {
9863 avail_awb_modes[size] = (uint8_t)val;
9864 size++;
9865 }
9866 }
9867 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9868 avail_awb_modes,
9869 size);
9870
9871 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9872 count = CAM_FLASH_FIRING_LEVEL_MAX;
9873 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9874 count);
9875 for (size_t i = 0; i < count; i++) {
9876 available_flash_levels[i] =
9877 gCamCapability[cameraId]->supported_firing_levels[i];
9878 }
9879 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9880 available_flash_levels, count);
9881
9882 uint8_t flashAvailable;
9883 if (gCamCapability[cameraId]->flash_available)
9884 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9885 else
9886 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9887 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9888 &flashAvailable, 1);
9889
9890 Vector<uint8_t> avail_ae_modes;
9891 count = CAM_AE_MODE_MAX;
9892 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9893 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009894 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9895 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9896 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9897 }
9898 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009899 }
9900 if (flashAvailable) {
9901 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9902 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9903 }
9904 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9905 avail_ae_modes.array(),
9906 avail_ae_modes.size());
9907
9908 int32_t sensitivity_range[2];
9909 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9910 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9911 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9912 sensitivity_range,
9913 sizeof(sensitivity_range) / sizeof(int32_t));
9914
9915 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9916 &gCamCapability[cameraId]->max_analog_sensitivity,
9917 1);
9918
9919 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9920 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9921 &sensor_orientation,
9922 1);
9923
9924 int32_t max_output_streams[] = {
9925 MAX_STALLING_STREAMS,
9926 MAX_PROCESSED_STREAMS,
9927 MAX_RAW_STREAMS};
9928 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9929 max_output_streams,
9930 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9931
9932 uint8_t avail_leds = 0;
9933 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9934 &avail_leds, 0);
9935
9936 uint8_t focus_dist_calibrated;
9937 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9938 gCamCapability[cameraId]->focus_dist_calibrated);
9939 if (NAME_NOT_FOUND != val) {
9940 focus_dist_calibrated = (uint8_t)val;
9941 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9942 &focus_dist_calibrated, 1);
9943 }
9944
9945 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9946 size = 0;
9947 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9948 MAX_TEST_PATTERN_CNT);
9949 for (size_t i = 0; i < count; i++) {
9950 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9951 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9952 if (NAME_NOT_FOUND != testpatternMode) {
9953 avail_testpattern_modes[size] = testpatternMode;
9954 size++;
9955 }
9956 }
9957 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9958 avail_testpattern_modes,
9959 size);
9960
9961 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9962 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9963 &max_pipeline_depth,
9964 1);
9965
9966 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9967 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9968 &partial_result_count,
9969 1);
9970
9971 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9972 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9973
9974 Vector<uint8_t> available_capabilities;
9975 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9976 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9977 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9978 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9979 if (supportBurst) {
9980 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9981 }
9982 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9983 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9984 if (hfrEnable && available_hfr_configs.array()) {
9985 available_capabilities.add(
9986 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9987 }
9988
9989 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9990 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9991 }
9992 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9993 available_capabilities.array(),
9994 available_capabilities.size());
9995
9996 //aeLockAvailable should be set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9997 //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9998 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9999 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10000
10001 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10002 &aeLockAvailable, 1);
10003
10004 //awbLockAvailable should be set to true if the capabilities include MANUAL_POST_PROCESSING or
10005 //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10006 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10007 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10008
10009 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10010 &awbLockAvailable, 1);
10011
10012 int32_t max_input_streams = 1;
10013 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10014 &max_input_streams,
10015 1);
10016
10017 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10018 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10019 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10020 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10021 HAL_PIXEL_FORMAT_YCbCr_420_888};
10022 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10023 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
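    /* Decoded, the map above declares that an IMPLEMENTATION_DEFINED (private)
     * reprocessing input can produce 2 output formats (BLOB or YCbCr_420_888),
     * and that a YCbCr_420_888 input can likewise produce BLOB or
     * YCbCr_420_888, matching the PRIVATE_REPROCESSING and YUV_REPROCESSING
     * capabilities advertised earlier.
     */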
10024
10025 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10026 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10027 &max_latency,
10028 1);
10029
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010030#ifndef USE_HAL_3_3
10031 int32_t isp_sensitivity_range[2];
10032 isp_sensitivity_range[0] =
10033 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10034 isp_sensitivity_range[1] =
10035 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10036 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10037 isp_sensitivity_range,
10038 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10039#endif
10040
Thierry Strudel3d639192016-09-09 11:52:26 -070010041 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10042 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10043 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10044 available_hot_pixel_modes,
10045 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10046
10047 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10048 ANDROID_SHADING_MODE_FAST,
10049 ANDROID_SHADING_MODE_HIGH_QUALITY};
10050 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10051 available_shading_modes,
10052 3);
10053
10054 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10055 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10056 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10057 available_lens_shading_map_modes,
10058 2);
10059
10060 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10061 ANDROID_EDGE_MODE_FAST,
10062 ANDROID_EDGE_MODE_HIGH_QUALITY,
10063 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10064 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10065 available_edge_modes,
10066 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10067
10068 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10069 ANDROID_NOISE_REDUCTION_MODE_FAST,
10070 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10071 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10072 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10073 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10074 available_noise_red_modes,
10075 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10076
10077 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10078 ANDROID_TONEMAP_MODE_FAST,
10079 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10080 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10081 available_tonemap_modes,
10082 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10083
10084 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10085 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10086 available_hot_pixel_map_modes,
10087 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10088
10089 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10090 gCamCapability[cameraId]->reference_illuminant1);
10091 if (NAME_NOT_FOUND != val) {
10092 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10093 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10094 }
10095
10096 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10097 gCamCapability[cameraId]->reference_illuminant2);
10098 if (NAME_NOT_FOUND != val) {
10099 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10100 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10101 }
10102
10103 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10104 (void *)gCamCapability[cameraId]->forward_matrix1,
10105 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10106
10107 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10108 (void *)gCamCapability[cameraId]->forward_matrix2,
10109 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10110
10111 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10112 (void *)gCamCapability[cameraId]->color_transform1,
10113 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10114
10115 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10116 (void *)gCamCapability[cameraId]->color_transform2,
10117 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10118
10119 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10120 (void *)gCamCapability[cameraId]->calibration_transform1,
10121 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10122
10123 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10124 (void *)gCamCapability[cameraId]->calibration_transform2,
10125 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10126
10127 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10128 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10129 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10130 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10131 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10132 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10133 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10134 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10135 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10136 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10137 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10138 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10139 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10140 ANDROID_JPEG_GPS_COORDINATES,
10141 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10142 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10143 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10144 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10145 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10146 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10147 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10148 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10149 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10150 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010151#ifndef USE_HAL_3_3
10152 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10153#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010154 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010155 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010156 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10157 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010158 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010159 /* DevCamDebug metadata request_keys_basic */
10160 DEVCAMDEBUG_META_ENABLE,
10161 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010162 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010163 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010164 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010165 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010166 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010167 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010168
10169 size_t request_keys_cnt =
10170 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10171 Vector<int32_t> available_request_keys;
10172 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10173 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10174 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10175 }
10176
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010177 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010178 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10179 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10180 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010181 }
10182
Thierry Strudel3d639192016-09-09 11:52:26 -070010183 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10184 available_request_keys.array(), available_request_keys.size());
10185
10186 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10187 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10188 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10189 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10190 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10191 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10192 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10193 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10194 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10195 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10196 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10197 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10198 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10199 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10200 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10201 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10202 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010203 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010204 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10205 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10206 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010207 ANDROID_STATISTICS_FACE_SCORES,
10208#ifndef USE_HAL_3_3
10209 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10210#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010211 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010212 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010213 // DevCamDebug metadata result_keys_basic
10214 DEVCAMDEBUG_META_ENABLE,
10215 // DevCamDebug metadata result_keys AF
10216 DEVCAMDEBUG_AF_LENS_POSITION,
10217 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10218 DEVCAMDEBUG_AF_TOF_DISTANCE,
10219 DEVCAMDEBUG_AF_LUMA,
10220 DEVCAMDEBUG_AF_HAF_STATE,
10221 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10222 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10223 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10224 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10225 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10226 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10227 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10228 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10229 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10230 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10231 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10232 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10233 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10234 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10235 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10236 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10237 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10238 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10239 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10240 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10241 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10242 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10243 // DevCamDebug metadata result_keys AEC
10244 DEVCAMDEBUG_AEC_TARGET_LUMA,
10245 DEVCAMDEBUG_AEC_COMP_LUMA,
10246 DEVCAMDEBUG_AEC_AVG_LUMA,
10247 DEVCAMDEBUG_AEC_CUR_LUMA,
10248 DEVCAMDEBUG_AEC_LINECOUNT,
10249 DEVCAMDEBUG_AEC_REAL_GAIN,
10250 DEVCAMDEBUG_AEC_EXP_INDEX,
10251 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010252 // DevCamDebug metadata result_keys zzHDR
10253 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10254 DEVCAMDEBUG_AEC_L_LINECOUNT,
10255 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10256 DEVCAMDEBUG_AEC_S_LINECOUNT,
10257 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10258 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10259 // DevCamDebug metadata result_keys ADRC
10260 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10261 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10262 DEVCAMDEBUG_AEC_GTM_RATIO,
10263 DEVCAMDEBUG_AEC_LTM_RATIO,
10264 DEVCAMDEBUG_AEC_LA_RATIO,
10265 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010266 // DevCamDebug metadata result_keys AEC MOTION
10267 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10268 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10269 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010270 // DevCamDebug metadata result_keys AWB
10271 DEVCAMDEBUG_AWB_R_GAIN,
10272 DEVCAMDEBUG_AWB_G_GAIN,
10273 DEVCAMDEBUG_AWB_B_GAIN,
10274 DEVCAMDEBUG_AWB_CCT,
10275 DEVCAMDEBUG_AWB_DECISION,
10276 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010277 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10278 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10279 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010280 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010281 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010282 };
10283
Thierry Strudel3d639192016-09-09 11:52:26 -070010284 size_t result_keys_cnt =
10285 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10286
10287 Vector<int32_t> available_result_keys;
10288 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10289 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10290 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10291 }
10292 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10293 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10294 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10295 }
10296 if (supportedFaceDetectMode == 1) {
10297 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10298 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10299 } else if ((supportedFaceDetectMode == 2) ||
10300 (supportedFaceDetectMode == 3)) {
10301 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10302 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10303 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010304#ifndef USE_HAL_3_3
10305 if (hasBlackRegions) {
10306 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10307 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10308 }
10309#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010310
10311 if (gExposeEnableZslKey) {
10312 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10313 }
10314
Thierry Strudel3d639192016-09-09 11:52:26 -070010315 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10316 available_result_keys.array(), available_result_keys.size());
10317
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010318 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010319 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10320 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10321 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10322 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10323 ANDROID_SCALER_CROPPING_TYPE,
10324 ANDROID_SYNC_MAX_LATENCY,
10325 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10326 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10327 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10328 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10329 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10330 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10331 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10332 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10333 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10334 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10335 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10336 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10337 ANDROID_LENS_FACING,
10338 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10339 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10340 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10341 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10342 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10343 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10344 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10345 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10346 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10347 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10348 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10349 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10350 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10351 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10352 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10353 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10354 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10355 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10356 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10357 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010358 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010359 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10360 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10361 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10362 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10363 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10364 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10365 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10366 ANDROID_CONTROL_AVAILABLE_MODES,
10367 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10368 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10369 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10370 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010371 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10372#ifndef USE_HAL_3_3
10373 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10374 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10375#endif
10376 };
10377
10378 Vector<int32_t> available_characteristics_keys;
10379 available_characteristics_keys.appendArray(characteristics_keys_basic,
10380 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10381#ifndef USE_HAL_3_3
10382 if (hasBlackRegions) {
10383 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10384 }
10385#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010386
10387 if (0 <= indexPD) {
10388 int32_t depthKeys[] = {
10389 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10390 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10391 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10392 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10393 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10394 };
10395 available_characteristics_keys.appendArray(depthKeys,
10396 sizeof(depthKeys) / sizeof(depthKeys[0]));
10397 }
10398
Thierry Strudel3d639192016-09-09 11:52:26 -070010399 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010400 available_characteristics_keys.array(),
10401 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010402
10403 /*available stall durations depend on the hw + sw and will be different for different devices */
10404 /*have to add for raw after implementation*/
10405 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10406 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10407
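    /*
     * Each entry added below is a flat 4-tuple of (format, width, height,
     * stall duration in ns), one tuple per supported size, which is the layout
     * ANDROID_SCALER_AVAILABLE_STALL_DURATIONS expects. Illustrative entry with
     * hypothetical values: { HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 300000000 }.
     */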
10408 Vector<int64_t> available_stall_durations;
10409 for (uint32_t j = 0; j < stall_formats_count; j++) {
10410 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10411 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10412 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10413 available_stall_durations.add(stall_formats[j]);
10414 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10415 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10416 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10417 }
10418 } else {
10419 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10420 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10421 available_stall_durations.add(stall_formats[j]);
10422 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10423 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10424 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10425 }
10426 }
10427 }
10428 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10429 available_stall_durations.array(),
10430 available_stall_durations.size());
10431
10432 //QCAMERA3_OPAQUE_RAW
10433 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10434 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10435 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10436 case LEGACY_RAW:
10437 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10438 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10439 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10440 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10441 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10442 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10443 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10444 break;
10445 case MIPI_RAW:
10446 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10447 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10448 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10449 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10450 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10451 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10452 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10453 break;
10454 default:
10455 LOGE("unknown opaque_raw_format %d",
10456 gCamCapability[cameraId]->opaque_raw_fmt);
10457 break;
10458 }
10459 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10460
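    /*
     * QCAMERA3_OPAQUE_RAW_STRIDES is published as flat (width, height, stride)
     * triples, one per supported raw dimension, so a client can map an opaque
     * raw buffer of a given size to its per-line stride. Illustrative entry with
     * hypothetical values: { 4208, 3120, 5264 } for a 10bpp MIPI-packed line.
     */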
10461 Vector<int32_t> strides;
10462 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10463 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10464 cam_stream_buf_plane_info_t buf_planes;
10465 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10466 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10467 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10468 &gCamCapability[cameraId]->padding_info, &buf_planes);
10469 strides.add(buf_planes.plane_info.mp[0].stride);
10470 }
10471 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10472 strides.size());
10473
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010474 //TBD: remove the following line once backend advertises zzHDR in feature mask
10475 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010476 //Video HDR default
10477 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10478 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010479 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010480 int32_t vhdr_mode[] = {
10481 QCAMERA3_VIDEO_HDR_MODE_OFF,
10482 QCAMERA3_VIDEO_HDR_MODE_ON};
10483
10484 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10485 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10486 vhdr_mode, vhdr_mode_count);
10487 }
10488
Thierry Strudel3d639192016-09-09 11:52:26 -070010489 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10490 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10491 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10492
10493 uint8_t isMonoOnly =
10494 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10495 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10496 &isMonoOnly, 1);
10497
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010498#ifndef USE_HAL_3_3
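    /*
     * ANDROID_SENSOR_OPAQUE_RAW_SIZE is a list of (width, height, total frame
     * length in bytes) triples; frame_len comes from the plane calculation below,
     * so it accounts for padding rather than assuming width * height * bpp.
     */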
10499 Vector<int32_t> opaque_size;
10500 for (size_t j = 0; j < scalar_formats_count; j++) {
10501 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10502 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10503 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10504 cam_stream_buf_plane_info_t buf_planes;
10505
10506 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10507 &gCamCapability[cameraId]->padding_info, &buf_planes);
10508
10509 if (rc == 0) {
10510 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10511 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10512 opaque_size.add(buf_planes.plane_info.frame_len);
10513                 } else {
10514 LOGE("raw frame calculation failed!");
10515 }
10516 }
10517 }
10518 }
10519
10520 if ((opaque_size.size() > 0) &&
10521 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10522 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10523 else
10524 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10525#endif
10526
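    /*
     * The capability blocks below share a common pattern for vendor tags: walk
     * the backend capability list, translate each enum through lookupFwkName()
     * against its METADATA_MAP, and publish only the values the framework map
     * knows about (NAME_NOT_FOUND entries are silently dropped).
     */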
Thierry Strudel04e026f2016-10-10 11:27:36 -070010527 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10528 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10529 size = 0;
10530 count = CAM_IR_MODE_MAX;
10531 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10532 for (size_t i = 0; i < count; i++) {
10533 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10534 gCamCapability[cameraId]->supported_ir_modes[i]);
10535 if (NAME_NOT_FOUND != val) {
10536 avail_ir_modes[size] = (int32_t)val;
10537 size++;
10538 }
10539 }
10540 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10541 avail_ir_modes, size);
10542 }
10543
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010544 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10545 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10546 size = 0;
10547 count = CAM_AEC_CONVERGENCE_MAX;
10548 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10549 for (size_t i = 0; i < count; i++) {
10550 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10551 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10552 if (NAME_NOT_FOUND != val) {
10553 available_instant_aec_modes[size] = (int32_t)val;
10554 size++;
10555 }
10556 }
10557 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10558 available_instant_aec_modes, size);
10559 }
10560
Thierry Strudel54dc9782017-02-15 12:12:10 -080010561 int32_t sharpness_range[] = {
10562 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10563 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10564 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10565
10566 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10567 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10568 size = 0;
10569 count = CAM_BINNING_CORRECTION_MODE_MAX;
10570 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10571 for (size_t i = 0; i < count; i++) {
10572 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10573 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10574 gCamCapability[cameraId]->supported_binning_modes[i]);
10575 if (NAME_NOT_FOUND != val) {
10576 avail_binning_modes[size] = (int32_t)val;
10577 size++;
10578 }
10579 }
10580 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10581 avail_binning_modes, size);
10582 }
10583
10584 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10585 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10586 size = 0;
10587 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10588 for (size_t i = 0; i < count; i++) {
10589 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10590 gCamCapability[cameraId]->supported_aec_modes[i]);
10591 if (NAME_NOT_FOUND != val)
10592 available_aec_modes[size++] = val;
10593 }
10594 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10595 available_aec_modes, size);
10596 }
10597
10598 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10599 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10600 size = 0;
10601 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10602 for (size_t i = 0; i < count; i++) {
10603 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10604 gCamCapability[cameraId]->supported_iso_modes[i]);
10605 if (NAME_NOT_FOUND != val)
10606 available_iso_modes[size++] = val;
10607 }
10608 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10609 available_iso_modes, size);
10610 }
10611
10612 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010613 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010614 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10615 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10616 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10617
10618 int32_t available_saturation_range[4];
10619 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10620 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10621 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10622 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10623 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10624 available_saturation_range, 4);
10625
10626 uint8_t is_hdr_values[2];
10627 is_hdr_values[0] = 0;
10628 is_hdr_values[1] = 1;
10629 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10630 is_hdr_values, 2);
10631
10632 float is_hdr_confidence_range[2];
10633 is_hdr_confidence_range[0] = 0.0;
10634 is_hdr_confidence_range[1] = 1.0;
10635 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10636 is_hdr_confidence_range, 2);
10637
Emilian Peev0a972ef2017-03-16 10:25:53 +000010638 size_t eepromLength = strnlen(
10639 reinterpret_cast<const char *>(
10640 gCamCapability[cameraId]->eeprom_version_info),
10641 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10642 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010643 char easelInfo[] = ",E:N";
10644 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10645 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10646 eepromLength += sizeof(easelInfo);
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010647 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10648 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010649 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010650 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010651 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10652 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10653 }
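    /*
     * Illustrative only: an eeprom string such as "1.0.3" (hypothetical value) is
     * published above as "1.0.3,E:Y" when Easel is present on the device, or as
     * "1.0.3,E:N" otherwise.
     */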
10654
Thierry Strudel3d639192016-09-09 11:52:26 -070010655 gStaticMetadata[cameraId] = staticInfo.release();
10656 return rc;
10657}
10658
10659/*===========================================================================
10660 * FUNCTION : makeTable
10661 *
10662 * DESCRIPTION: make a table of sizes
10663 *
10664 * PARAMETERS :
10665 *
10666 *
10667 *==========================================================================*/
10668void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10669 size_t max_size, int32_t *sizeTable)
10670{
10671 size_t j = 0;
10672 if (size > max_size) {
10673 size = max_size;
10674 }
10675 for (size_t i = 0; i < size; i++) {
10676 sizeTable[j] = dimTable[i].width;
10677 sizeTable[j+1] = dimTable[i].height;
10678 j+=2;
10679 }
10680}
10681
10682/*===========================================================================
10683 * FUNCTION : makeFPSTable
10684 *
10685 * DESCRIPTION: make a table of fps ranges
10686 *
10687 * PARAMETERS :
10688 *
10689 *==========================================================================*/
10690void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10691 size_t max_size, int32_t *fpsRangesTable)
10692{
10693 size_t j = 0;
10694 if (size > max_size) {
10695 size = max_size;
10696 }
10697 for (size_t i = 0; i < size; i++) {
10698 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10699 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10700 j+=2;
10701 }
10702}
10703
10704/*===========================================================================
10705 * FUNCTION : makeOverridesList
10706 *
10707 * DESCRIPTION: make a list of scene mode overrides
10708 *
10709 * PARAMETERS :
10710 *   @overridesTable / @size / @max_size : daemon override table and its bounds
10711 *   @overridesList / @supported_indexes / @camera_id : output (AE, AWB, AF) triplets, fwk scene mode indexes and camera id
10712 *==========================================================================*/
10713void QCamera3HardwareInterface::makeOverridesList(
10714 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10715 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10716{
10717 /*daemon will give a list of overrides for all scene modes.
10718 However we should send the fwk only the overrides for the scene modes
10719 supported by the framework*/
10720 size_t j = 0;
10721 if (size > max_size) {
10722 size = max_size;
10723 }
10724 size_t focus_count = CAM_FOCUS_MODE_MAX;
10725 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10726 focus_count);
10727 for (size_t i = 0; i < size; i++) {
10728 bool supt = false;
10729 size_t index = supported_indexes[i];
10730 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10731 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10732 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10733 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10734 overridesTable[index].awb_mode);
10735 if (NAME_NOT_FOUND != val) {
10736 overridesList[j+1] = (uint8_t)val;
10737 }
10738 uint8_t focus_override = overridesTable[index].af_mode;
10739 for (size_t k = 0; k < focus_count; k++) {
10740 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10741 supt = true;
10742 break;
10743 }
10744 }
10745 if (supt) {
10746 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10747 focus_override);
10748 if (NAME_NOT_FOUND != val) {
10749 overridesList[j+2] = (uint8_t)val;
10750 }
10751 } else {
10752 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10753 }
10754 j+=3;
10755 }
10756}
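/*
 * Illustrative only (hypothetical scene mode list): the overrides list built above
 * is consumed by ANDROID_CONTROL_SCENE_MODE_OVERRIDES as consecutive byte triplets
 * of (AE mode, AWB mode, AF mode), one triplet per framework-visible scene mode,
 * e.g. { AE_ON_AUTO_FLASH, AWB_AUTO, AF_CONTINUOUS_PICTURE } for a PORTRAIT entry.
 */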
10757
10758/*===========================================================================
10759 * FUNCTION : filterJpegSizes
10760 *
10761 * DESCRIPTION: Filters the processed sizes down to the JPEG sizes that are at least
10762 *              as large as the active array dimensions divided by downscale_factor
10763 *
10764 * PARAMETERS : @jpegSizes / @processedSizes / @processedSizesCnt / @maxCount : output array,
10765 *              candidate sizes, their count and the output capacity; @active_array_size / @downscale_factor : minimum-size constraint
10766 * RETURN : length of jpegSizes array
10767 *==========================================================================*/
10768
10769size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10770 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10771 uint8_t downscale_factor)
10772{
10773 if (0 == downscale_factor) {
10774 downscale_factor = 1;
10775 }
10776
10777 int32_t min_width = active_array_size.width / downscale_factor;
10778 int32_t min_height = active_array_size.height / downscale_factor;
10779 size_t jpegSizesCnt = 0;
10780 if (processedSizesCnt > maxCount) {
10781 processedSizesCnt = maxCount;
10782 }
10783 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10784 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10785 jpegSizes[jpegSizesCnt] = processedSizes[i];
10786 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10787 jpegSizesCnt += 2;
10788 }
10789 }
10790 return jpegSizesCnt;
10791}
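/*
 * Worked example with hypothetical numbers: for an active array of 4032x3024 and
 * downscale_factor = 2, min_width/min_height become 2016/1512, so a processed size
 * of 1920x1080 is dropped while 2048x1536 is kept and reported as a JPEG size.
 */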
10792
10793/*===========================================================================
10794 * FUNCTION : computeNoiseModelEntryS
10795 *
10796 * DESCRIPTION: function to map a given sensitivity to the S noise
10797 * model parameters in the DNG noise model.
10798 *
10799 * PARAMETERS : sens : the sensor sensitivity
10800 *
10801 * RETURN     : S (sensor amplification) noise
10802 *
10803 *==========================================================================*/
10804double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10805 double s = gCamCapability[mCameraId]->gradient_S * sens +
10806 gCamCapability[mCameraId]->offset_S;
10807 return ((s < 0.0) ? 0.0 : s);
10808}
10809
10810/*===========================================================================
10811 * FUNCTION : computeNoiseModelEntryO
10812 *
10813 * DESCRIPTION: function to map a given sensitivity to the O noise
10814 * model parameters in the DNG noise model.
10815 *
10816 * PARAMETERS : sens : the sensor sensitivity
10817 *
10818 * RETURN     : O (sensor readout) noise
10819 *
10820 *==========================================================================*/
10821double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10822 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10823 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10824 1.0 : (1.0 * sens / max_analog_sens);
10825 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10826 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10827 return ((o < 0.0) ? 0.0 : o);
10828}
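/*
 * The two helpers above feed the DNG noise profile published via
 * ANDROID_SENSOR_NOISE_PROFILE, where the noise at a normalized pixel value x is
 * modeled as N(x) = sqrt(S * x + O). Worked example with hypothetical calibration
 * data (gradient_S = 3.0e-6, offset_S = 4.0e-6) at sens = 200:
 *   S = 3.0e-6 * 200 + 4.0e-6 = 6.04e-4
 * O grows with the square of the digital gain once sens exceeds
 * max_analog_sensitivity, matching the quadratic term in computeNoiseModelEntryO().
 */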
10829
10830/*===========================================================================
10831 * FUNCTION : getSensorSensitivity
10832 *
10833 * DESCRIPTION: convert iso_mode to an integer value
10834 *
10835 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10836 *
10837 * RETURN     : sensitivity supported by sensor
10838 *
10839 *==========================================================================*/
10840int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10841{
10842 int32_t sensitivity;
10843
10844 switch (iso_mode) {
10845 case CAM_ISO_MODE_100:
10846 sensitivity = 100;
10847 break;
10848 case CAM_ISO_MODE_200:
10849 sensitivity = 200;
10850 break;
10851 case CAM_ISO_MODE_400:
10852 sensitivity = 400;
10853 break;
10854 case CAM_ISO_MODE_800:
10855 sensitivity = 800;
10856 break;
10857 case CAM_ISO_MODE_1600:
10858 sensitivity = 1600;
10859 break;
10860 default:
10861 sensitivity = -1;
10862 break;
10863 }
10864 return sensitivity;
10865}
10866
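/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: Create the Easel manager client if needed and, when Easel is
 *              present (and not disabled via camera.hdrplus.donotpoweroneasel),
 *              open it, suspend it immediately and latch the HDR+ related
 *              properties. Callers are expected to hold gHdrPlusClientLock.
 *
 * RETURN     : OK on success; an Android error code otherwise
 *==========================================================================*/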
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010867int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010868 if (gEaselManagerClient == nullptr) {
10869 gEaselManagerClient = EaselManagerClient::create();
10870 if (gEaselManagerClient == nullptr) {
10871 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10872 return -ENODEV;
10873 }
10874 }
10875
10876 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010877 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10878 // to connect to Easel.
10879 bool doNotpowerOnEasel =
10880 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10881
10882 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010883 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10884 return OK;
10885 }
10886
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010887 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010888 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010889 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010890 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010891 return res;
10892 }
10893
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010894 EaselManagerClientOpened = true;
10895
Chien-Yu Chen44abb642017-06-02 18:00:38 -070010896 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010897 if (res != OK) {
10898 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10899 }
10900
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010901 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010902 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010903
10904 // Expose enableZsl key only when HDR+ mode is enabled.
10905 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010906 }
10907
10908 return OK;
10909}
10910
Thierry Strudel3d639192016-09-09 11:52:26 -070010911/*===========================================================================
10912 * FUNCTION : getCamInfo
10913 *
10914 * DESCRIPTION: query camera capabilities
10915 *
10916 * PARAMETERS :
10917 * @cameraId : camera Id
10918 * @info : camera info struct to be filled in with camera capabilities
10919 *
10920 * RETURN : int type of status
10921 * NO_ERROR -- success
10922 * none-zero failure code
10923 *==========================================================================*/
10924int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10925 struct camera_info *info)
10926{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010927 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010928 int rc = 0;
10929
10930 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010931
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010932 {
10933 Mutex::Autolock l(gHdrPlusClientLock);
10934 rc = initHdrPlusClientLocked();
10935 if (rc != OK) {
10936 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10937 pthread_mutex_unlock(&gCamLock);
10938 return rc;
10939 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010940 }
10941
Thierry Strudel3d639192016-09-09 11:52:26 -070010942 if (NULL == gCamCapability[cameraId]) {
10943 rc = initCapabilities(cameraId);
10944 if (rc < 0) {
10945 pthread_mutex_unlock(&gCamLock);
10946 return rc;
10947 }
10948 }
10949
10950 if (NULL == gStaticMetadata[cameraId]) {
10951 rc = initStaticMetadata(cameraId);
10952 if (rc < 0) {
10953 pthread_mutex_unlock(&gCamLock);
10954 return rc;
10955 }
10956 }
10957
10958 switch(gCamCapability[cameraId]->position) {
10959 case CAM_POSITION_BACK:
10960 case CAM_POSITION_BACK_AUX:
10961 info->facing = CAMERA_FACING_BACK;
10962 break;
10963
10964 case CAM_POSITION_FRONT:
10965 case CAM_POSITION_FRONT_AUX:
10966 info->facing = CAMERA_FACING_FRONT;
10967 break;
10968
10969 default:
10970 LOGE("Unknown position type %d for camera id:%d",
10971 gCamCapability[cameraId]->position, cameraId);
10972 rc = -1;
10973 break;
10974 }
10975
10976
10977 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010978#ifndef USE_HAL_3_3
10979 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10980#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010981 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010982#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010983 info->static_camera_characteristics = gStaticMetadata[cameraId];
10984
10985 //For now assume both cameras can operate independently.
10986 info->conflicting_devices = NULL;
10987 info->conflicting_devices_length = 0;
10988
10989 //resource cost is 100 * MIN(1.0, m/M),
10990 //where m is throughput requirement with maximum stream configuration
10991 //and M is CPP maximum throughput.
10992 float max_fps = 0.0;
10993 for (uint32_t i = 0;
10994 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10995 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10996 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10997 }
10998 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10999 gCamCapability[cameraId]->active_array_size.width *
11000 gCamCapability[cameraId]->active_array_size.height * max_fps /
11001 gCamCapability[cameraId]->max_pixel_bandwidth;
11002 info->resource_cost = 100 * MIN(1.0, ratio);
11003 LOGI("camera %d resource cost is %d", cameraId,
11004 info->resource_cost);
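    /*
     * Worked example with hypothetical numbers: MAX_PROCESSED_STREAMS = 3, a
     * 4000x3000 active array, max_fps = 30 and a CPP bandwidth of 1.2e9 pixels/s
     * give ratio = 3 * 4000 * 3000 * 30 / 1.2e9 = 0.9, i.e. a resource cost of 90.
     */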
11005
11006 pthread_mutex_unlock(&gCamLock);
11007 return rc;
11008}
11009
11010/*===========================================================================
11011 * FUNCTION : translateCapabilityToMetadata
11012 *
11013 * DESCRIPTION: translate the capability into camera_metadata_t
11014 *
11015 * PARAMETERS : type of the request
11016 *
11017 *
11018 * RETURN : success: camera_metadata_t*
11019 * failure: NULL
11020 *
11021 *==========================================================================*/
11022camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11023{
11024 if (mDefaultMetadata[type] != NULL) {
11025 return mDefaultMetadata[type];
11026 }
11027 //first time we are handling this request
11028 //fill up the metadata structure using the wrapper class
11029 CameraMetadata settings;
11030 //translate from cam_capability_t to camera_metadata_tag_t
11031 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11032 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11033 int32_t defaultRequestID = 0;
11034 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11035
11036 /* OIS disable */
11037 char ois_prop[PROPERTY_VALUE_MAX];
11038 memset(ois_prop, 0, sizeof(ois_prop));
11039 property_get("persist.camera.ois.disable", ois_prop, "0");
11040 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11041
11042 /* Force video to use OIS */
11043 char videoOisProp[PROPERTY_VALUE_MAX];
11044 memset(videoOisProp, 0, sizeof(videoOisProp));
11045 property_get("persist.camera.ois.video", videoOisProp, "1");
11046 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011047
11048 // Hybrid AE enable/disable
11049 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11050 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11051 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11052 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11053
Thierry Strudel3d639192016-09-09 11:52:26 -070011054 uint8_t controlIntent = 0;
11055 uint8_t focusMode;
11056 uint8_t vsMode;
11057 uint8_t optStabMode;
11058 uint8_t cacMode;
11059 uint8_t edge_mode;
11060 uint8_t noise_red_mode;
11061 uint8_t tonemap_mode;
11062 bool highQualityModeEntryAvailable = FALSE;
11063 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011064 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011065 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11066 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011067 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011068 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011069 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011070
Thierry Strudel3d639192016-09-09 11:52:26 -070011071 switch (type) {
11072 case CAMERA3_TEMPLATE_PREVIEW:
11073 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11074 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11075 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11076 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11077 edge_mode = ANDROID_EDGE_MODE_FAST;
11078 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11079 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11080 break;
11081 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11082 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11083 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11084 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11085 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11086 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11087 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11088 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11089 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11090 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11091 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11092 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11093 highQualityModeEntryAvailable = TRUE;
11094 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11095 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11096 fastModeEntryAvailable = TRUE;
11097 }
11098 }
11099 if (highQualityModeEntryAvailable) {
11100 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11101 } else if (fastModeEntryAvailable) {
11102 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11103 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011104 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11105 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11106 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011107 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011108 break;
11109 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11110 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11111 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11112 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011113 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11114 edge_mode = ANDROID_EDGE_MODE_FAST;
11115 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11116 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11117 if (forceVideoOis)
11118 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11119 break;
11120 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11121 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11122 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11123 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011124 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11125 edge_mode = ANDROID_EDGE_MODE_FAST;
11126 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11127 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11128 if (forceVideoOis)
11129 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11130 break;
11131 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11132 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11133 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11134 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11135 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11136 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11137 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11138 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11139 break;
11140 case CAMERA3_TEMPLATE_MANUAL:
11141 edge_mode = ANDROID_EDGE_MODE_FAST;
11142 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11143 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11144 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11145 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11146 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11147 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11148 break;
11149 default:
11150 edge_mode = ANDROID_EDGE_MODE_FAST;
11151 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11152 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11153 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11154 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11155 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11156 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11157 break;
11158 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011159    // Set CAC to OFF if the underlying device doesn't support it
11160 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11161 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11162 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011163 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11164 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11165 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11166 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11167 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11168 }
11169 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011170 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011171 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011172
11173 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11174 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11175 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11176 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11177 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11178 || ois_disable)
11179 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11180 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011181 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011182
11183 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11184 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11185
11186 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11187 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11188
11189 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11190 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11191
11192 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11193 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11194
11195 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11196 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11197
11198 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11199 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11200
11201 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11202 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11203
11204 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11205 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11206
11207 /*flash*/
11208 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11209 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11210
11211 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11212 settings.update(ANDROID_FLASH_FIRING_POWER,
11213 &flashFiringLevel, 1);
11214
11215 /* lens */
11216 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11217 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11218
11219 if (gCamCapability[mCameraId]->filter_densities_count) {
11220 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11221 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11222 gCamCapability[mCameraId]->filter_densities_count);
11223 }
11224
11225 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11226 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11227
Thierry Strudel3d639192016-09-09 11:52:26 -070011228 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11229 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11230
11231 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11232 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11233
11234 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11235 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11236
11237 /* face detection (default to OFF) */
11238 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11239 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11240
Thierry Strudel54dc9782017-02-15 12:12:10 -080011241 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11242 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011243
11244 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11245 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11246
11247 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11248 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11249
Thierry Strudel3d639192016-09-09 11:52:26 -070011250
11251 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11252 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11253
11254 /* Exposure time(Update the Min Exposure Time)*/
11255 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11256 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11257
11258 /* frame duration */
11259 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11260 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11261
11262 /* sensitivity */
11263 static const int32_t default_sensitivity = 100;
11264 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011265#ifndef USE_HAL_3_3
11266 static const int32_t default_isp_sensitivity =
11267 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11268 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11269#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011270
11271 /*edge mode*/
11272 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11273
11274 /*noise reduction mode*/
11275 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11276
11277 /*color correction mode*/
11278 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11279 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11280
11281 /*transform matrix mode*/
11282 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11283
11284 int32_t scaler_crop_region[4];
11285 scaler_crop_region[0] = 0;
11286 scaler_crop_region[1] = 0;
11287 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11288 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11289 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11290
11291 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11292 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11293
11294 /*focus distance*/
11295 float focus_distance = 0.0;
11296 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11297
11298 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011299 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011300 float max_range = 0.0;
11301 float max_fixed_fps = 0.0;
11302 int32_t fps_range[2] = {0, 0};
11303 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11304 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011305 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11306 TEMPLATE_MAX_PREVIEW_FPS) {
11307 continue;
11308 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011309 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11310 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11311 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11312 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11313 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11314 if (range > max_range) {
11315 fps_range[0] =
11316 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11317 fps_range[1] =
11318 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11319 max_range = range;
11320 }
11321 } else {
11322 if (range < 0.01 && max_fixed_fps <
11323 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11324 fps_range[0] =
11325 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11326 fps_range[1] =
11327 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11328 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11329 }
11330 }
11331 }
11332 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
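    /*
     * Illustrative only (hypothetical fps table): with ranges {[15,30], [30,30], [7,60]},
     * the [7,60] entry is skipped by the TEMPLATE_MAX_PREVIEW_FPS cap (30, per the
     * comment above), preview/still/ZSL templates pick the widest remaining range
     * [15,30], and the video templates pick the fastest fixed range [30,30].
     */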
11333
11334 /*precapture trigger*/
11335 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11336 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11337
11338 /*af trigger*/
11339 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11340 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11341
11342 /* ae & af regions */
11343 int32_t active_region[] = {
11344 gCamCapability[mCameraId]->active_array_size.left,
11345 gCamCapability[mCameraId]->active_array_size.top,
11346 gCamCapability[mCameraId]->active_array_size.left +
11347 gCamCapability[mCameraId]->active_array_size.width,
11348 gCamCapability[mCameraId]->active_array_size.top +
11349 gCamCapability[mCameraId]->active_array_size.height,
11350 0};
11351 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11352 sizeof(active_region) / sizeof(active_region[0]));
11353 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11354 sizeof(active_region) / sizeof(active_region[0]));
11355
11356 /* black level lock */
11357 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11358 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11359
Thierry Strudel3d639192016-09-09 11:52:26 -070011360 //special defaults for manual template
11361 if (type == CAMERA3_TEMPLATE_MANUAL) {
11362 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11363 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11364
11365 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11366 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11367
11368 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11369 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11370
11371 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11372 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11373
11374 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11375 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11376
11377 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11378 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11379 }
11380
11381
11382 /* TNR
11383      * This is where we decide for which templates TNR is enabled by default.
11384      * TNR is enabled if either the preview or the video stream requires it.
11385      * This is not to be confused with per-stream linking; that decision is
11386      * made per session and is handled as part of stream configuration.
11387 */
11388 uint8_t tnr_enable = 0;
11389
11390 if (m_bTnrPreview || m_bTnrVideo) {
11391
11392 switch (type) {
11393 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11394 tnr_enable = 1;
11395 break;
11396
11397 default:
11398 tnr_enable = 0;
11399 break;
11400 }
11401
11402 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11403 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11404 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11405
11406 LOGD("TNR:%d with process plate %d for template:%d",
11407 tnr_enable, tnr_process_type, type);
11408 }
11409
11410 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011411 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011412 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11413
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011414 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011415 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11416
Shuzhen Wang920ea402017-05-03 08:49:39 -070011417 uint8_t related_camera_id = mCameraId;
11418 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011419
11420 /* CDS default */
11421 char prop[PROPERTY_VALUE_MAX];
11422 memset(prop, 0, sizeof(prop));
11423 property_get("persist.camera.CDS", prop, "Auto");
11424 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11425 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11426 if (CAM_CDS_MODE_MAX == cds_mode) {
11427 cds_mode = CAM_CDS_MODE_AUTO;
11428 }
11429
11430 /* Disabling CDS in templates which have TNR enabled*/
11431 if (tnr_enable)
11432 cds_mode = CAM_CDS_MODE_OFF;
11433
11434 int32_t mode = cds_mode;
11435 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011436
Thierry Strudel269c81a2016-10-12 12:13:59 -070011437 /* Manual Convergence AEC Speed is disabled by default*/
11438 float default_aec_speed = 0;
11439 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11440
11441 /* Manual Convergence AWB Speed is disabled by default*/
11442 float default_awb_speed = 0;
11443 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11444
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011445 // Set instant AEC to normal convergence by default
11446 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11447 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11448
Shuzhen Wang19463d72016-03-08 11:09:52 -080011449 /* hybrid ae */
11450 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11451
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011452 if (gExposeEnableZslKey) {
11453 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11454 }
11455
Thierry Strudel3d639192016-09-09 11:52:26 -070011456 mDefaultMetadata[type] = settings.release();
11457
11458 return mDefaultMetadata[type];
11459}
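/*
 * Illustrative only -- not part of this HAL. A framework-side sketch of how the
 * template produced above is typically fetched through the HAL3 entry point and
 * tweaked before being submitted as a capture request:
 *
 *   const camera_metadata_t *tpl = device->ops->construct_default_request_settings(
 *           device, CAMERA3_TEMPLATE_PREVIEW);
 *   CameraMetadata request(clone_camera_metadata(tpl));
 *   uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO;
 *   request.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);   // override one default
 */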
11460
11461/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011462 * FUNCTION : getExpectedFrameDuration
11463 *
11464 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11465 * duration
11466 *
11467 * PARAMETERS :
11468 * @request : request settings
11469 * @frameDuration : The maximum frame duration in nanoseconds
11470 *
11471 * RETURN : None
11472 *==========================================================================*/
11473void QCamera3HardwareInterface::getExpectedFrameDuration(
11474 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11475 if (nullptr == frameDuration) {
11476 return;
11477 }
11478
11479 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11480 find_camera_metadata_ro_entry(request,
11481 ANDROID_SENSOR_EXPOSURE_TIME,
11482 &e);
11483 if (e.count > 0) {
11484 *frameDuration = e.data.i64[0];
11485 }
11486 find_camera_metadata_ro_entry(request,
11487 ANDROID_SENSOR_FRAME_DURATION,
11488 &e);
11489 if (e.count > 0) {
11490 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11491 }
11492}
11493
11494/*===========================================================================
11495 * FUNCTION : calculateMaxExpectedDuration
11496 *
11497 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11498 * current camera settings.
11499 *
11500 * PARAMETERS :
11501 * @request : request settings
11502 *
11503 * RETURN : Expected frame duration in nanoseconds.
11504 *==========================================================================*/
11505nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11506 const camera_metadata_t *request) {
11507 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11508 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11509 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11510 if (e.count == 0) {
11511 return maxExpectedDuration;
11512 }
11513
11514 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11515 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11516 }
11517
11518 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11519 return maxExpectedDuration;
11520 }
11521
11522 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11523 if (e.count == 0) {
11524 return maxExpectedDuration;
11525 }
11526
11527 switch (e.data.u8[0]) {
11528 case ANDROID_CONTROL_AE_MODE_OFF:
11529 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11530 break;
11531 default:
11532 find_camera_metadata_ro_entry(request,
11533 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11534 &e);
11535 if (e.count > 1) {
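            // ANDROID_CONTROL_AE_TARGET_FPS_RANGE is an int32[2] {min, max} pair;
            // the minimum fps gives the longest (worst-case) frame duration.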
11536            maxExpectedDuration = 1e9 / e.data.i32[0];
11537 }
11538 break;
11539 }
11540
11541 return maxExpectedDuration;
11542}
11543
11544/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011545 * FUNCTION : setFrameParameters
11546 *
11547 * DESCRIPTION: set parameters per frame as requested in the metadata from
11548 * framework
11549 *
11550 * PARAMETERS :
11551 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011552 *   @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011553 * @blob_request: Whether this request is a blob request or not
11554 *
11555 * RETURN : success: NO_ERROR
11556 * failure:
11557 *==========================================================================*/
11558int QCamera3HardwareInterface::setFrameParameters(
11559 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011560 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011561 int blob_request,
11562 uint32_t snapshotStreamId)
11563{
11564 /*translate from camera_metadata_t type to parm_type_t*/
11565 int rc = 0;
11566 int32_t hal_version = CAM_HAL_V3;
11567
11568 clear_metadata_buffer(mParameters);
11569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11570 LOGE("Failed to set hal version in the parameters");
11571 return BAD_VALUE;
11572 }
11573
11574 /*we need to update the frame number in the parameters*/
11575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11576 request->frame_number)) {
11577 LOGE("Failed to set the frame number in the parameters");
11578 return BAD_VALUE;
11579 }
11580
11581 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011582 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011583 LOGE("Failed to set stream type mask in the parameters");
11584 return BAD_VALUE;
11585 }
11586
11587 if (mUpdateDebugLevel) {
11588 uint32_t dummyDebugLevel = 0;
11589        /* The value of dummyDebugLevel is irrelevant. Setting
11590         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL makes the backend re-read the debug property */
11591 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11592 dummyDebugLevel)) {
11593 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11594 return BAD_VALUE;
11595 }
11596 mUpdateDebugLevel = false;
11597 }
11598
11599 if(request->settings != NULL){
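        // Cache the worst-case frame duration for this request; presumably used
        // elsewhere in the HAL to decide how long to wait for in-flight results.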
Emilian Peev30522a12017-08-03 14:36:33 +010011600 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011601 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11602 if (blob_request)
11603 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11604 }
11605
11606 return rc;
11607}
11608
11609/*===========================================================================
11610 * FUNCTION : setReprocParameters
11611 *
11612 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11613 * return it.
11614 *
11615 * PARAMETERS :
11616 * @request : request that needs to be serviced
11617 *
11618 * RETURN : success: NO_ERROR
11619 * failure:
11620 *==========================================================================*/
11621int32_t QCamera3HardwareInterface::setReprocParameters(
11622 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11623 uint32_t snapshotStreamId)
11624{
11625 /*translate from camera_metadata_t type to parm_type_t*/
11626 int rc = 0;
11627
11628 if (NULL == request->settings){
11629 LOGE("Reprocess settings cannot be NULL");
11630 return BAD_VALUE;
11631 }
11632
11633 if (NULL == reprocParam) {
11634 LOGE("Invalid reprocessing metadata buffer");
11635 return BAD_VALUE;
11636 }
11637 clear_metadata_buffer(reprocParam);
11638
11639 /*we need to update the frame number in the parameters*/
11640 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11641 request->frame_number)) {
11642 LOGE("Failed to set the frame number in the parameters");
11643 return BAD_VALUE;
11644 }
11645
11646 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11647 if (rc < 0) {
11648 LOGE("Failed to translate reproc request");
11649 return rc;
11650 }
11651
11652 CameraMetadata frame_settings;
11653 frame_settings = request->settings;
11654 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11655 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11656 int32_t *crop_count =
11657 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11658 int32_t *crop_data =
11659 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11660 int32_t *roi_map =
11661 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11662 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
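            // Only the first crop/ROI entry from the vendor tag is consumed here;
            // the reprocess path operates on a single stream (num_of_streams = 1).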
11663 cam_crop_data_t crop_meta;
11664 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11665 crop_meta.num_of_streams = 1;
11666 crop_meta.crop_info[0].crop.left = crop_data[0];
11667 crop_meta.crop_info[0].crop.top = crop_data[1];
11668 crop_meta.crop_info[0].crop.width = crop_data[2];
11669 crop_meta.crop_info[0].crop.height = crop_data[3];
11670
11671 crop_meta.crop_info[0].roi_map.left =
11672 roi_map[0];
11673 crop_meta.crop_info[0].roi_map.top =
11674 roi_map[1];
11675 crop_meta.crop_info[0].roi_map.width =
11676 roi_map[2];
11677 crop_meta.crop_info[0].roi_map.height =
11678 roi_map[3];
11679
11680 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11681 rc = BAD_VALUE;
11682 }
11683 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11684 request->input_buffer->stream,
11685 crop_meta.crop_info[0].crop.left,
11686 crop_meta.crop_info[0].crop.top,
11687 crop_meta.crop_info[0].crop.width,
11688 crop_meta.crop_info[0].crop.height);
11689 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11690 request->input_buffer->stream,
11691 crop_meta.crop_info[0].roi_map.left,
11692 crop_meta.crop_info[0].roi_map.top,
11693 crop_meta.crop_info[0].roi_map.width,
11694 crop_meta.crop_info[0].roi_map.height);
11695 } else {
11696 LOGE("Invalid reprocess crop count %d!", *crop_count);
11697 }
11698 } else {
11699 LOGE("No crop data from matching output stream");
11700 }
11701
11702    /* These settings are not needed for regular requests, so handle them specially for
11703       reprocess requests; they carry information needed for EXIF tags */
11704 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11705 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11706 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11707 if (NAME_NOT_FOUND != val) {
11708 uint32_t flashMode = (uint32_t)val;
11709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11710 rc = BAD_VALUE;
11711 }
11712 } else {
11713 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11714 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11715 }
11716 } else {
11717 LOGH("No flash mode in reprocess settings");
11718 }
11719
11720 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11721 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11723 rc = BAD_VALUE;
11724 }
11725 } else {
11726 LOGH("No flash state in reprocess settings");
11727 }
11728
11729 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11730 uint8_t *reprocessFlags =
11731 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11732 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11733 *reprocessFlags)) {
11734 rc = BAD_VALUE;
11735 }
11736 }
11737
Thierry Strudel54dc9782017-02-15 12:12:10 -080011738 // Add exif debug data to internal metadata
11739 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11740 mm_jpeg_debug_exif_params_t *debug_params =
11741 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11742 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11743 // AE
11744 if (debug_params->ae_debug_params_valid == TRUE) {
11745 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11746 debug_params->ae_debug_params);
11747 }
11748 // AWB
11749 if (debug_params->awb_debug_params_valid == TRUE) {
11750 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11751 debug_params->awb_debug_params);
11752 }
11753 // AF
11754 if (debug_params->af_debug_params_valid == TRUE) {
11755 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11756 debug_params->af_debug_params);
11757 }
11758 // ASD
11759 if (debug_params->asd_debug_params_valid == TRUE) {
11760 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11761 debug_params->asd_debug_params);
11762 }
11763 // Stats
11764 if (debug_params->stats_debug_params_valid == TRUE) {
11765 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11766 debug_params->stats_debug_params);
11767 }
11768 // BE Stats
11769 if (debug_params->bestats_debug_params_valid == TRUE) {
11770 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11771 debug_params->bestats_debug_params);
11772 }
11773 // BHIST
11774 if (debug_params->bhist_debug_params_valid == TRUE) {
11775 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11776 debug_params->bhist_debug_params);
11777 }
11778 // 3A Tuning
11779 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11780 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11781 debug_params->q3a_tuning_debug_params);
11782 }
11783 }
11784
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011785 // Add metadata which reprocess needs
11786 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11787 cam_reprocess_info_t *repro_info =
11788 (cam_reprocess_info_t *)frame_settings.find
11789 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011790 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011791 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011792 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011793 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011794 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011795 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011796 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011797 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011798 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011799 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011800 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011801 repro_info->pipeline_flip);
11802 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11803 repro_info->af_roi);
11804 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11805 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011806        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11807           CAM_INTF_PARM_ROTATION has already been added in
11808           translateToHalMetadata and the HAL needs to keep that new rotation
11809           metadata. Otherwise, the old rotation info saved in the vendor tag
11810           is used */
11811 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11812 CAM_INTF_PARM_ROTATION, reprocParam) {
11813 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11814 } else {
11815 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011816 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011817 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011818 }
11819
11820    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11821       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11822       roi.width and roi.height are the final JPEG size.
11823       For now, the HAL only checks this for reprocess requests */
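    /* Illustrative example (values assumed): with QCAMERA3_JPEG_ENCODE_CROP_RECT set
       to {0, 0, 4000, 3000} and QCAMERA3_JPEG_ENCODE_CROP_ROI set to {0, 0, 1920, 1080},
       the encoder crops the CPP output to 4000x3000 and scales that region so the
       final JPEG comes out at 1920x1080. */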
11824 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11825 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11826 uint8_t *enable =
11827 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11828 if (*enable == TRUE) {
11829 int32_t *crop_data =
11830 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11831 cam_stream_crop_info_t crop_meta;
11832 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11833 crop_meta.stream_id = 0;
11834 crop_meta.crop.left = crop_data[0];
11835 crop_meta.crop.top = crop_data[1];
11836 crop_meta.crop.width = crop_data[2];
11837 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011838 // The JPEG crop roi should match cpp output size
11839 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11840 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11841 crop_meta.roi_map.left = 0;
11842 crop_meta.roi_map.top = 0;
11843 crop_meta.roi_map.width = cpp_crop->crop.width;
11844 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011845 }
11846 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11847 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011848 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011849 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011850 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11851 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011852 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011853 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11854
11855 // Add JPEG scale information
11856 cam_dimension_t scale_dim;
11857 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11858 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11859 int32_t *roi =
11860 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11861 scale_dim.width = roi[2];
11862 scale_dim.height = roi[3];
11863 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11864 scale_dim);
11865 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11866 scale_dim.width, scale_dim.height, mCameraId);
11867 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011868 }
11869 }
11870
11871 return rc;
11872}
11873
11874/*===========================================================================
11875 * FUNCTION : saveRequestSettings
11876 *
11877 * DESCRIPTION: Add any settings that might have changed to the request settings
11878 * and save the settings to be applied on the frame
11879 *
11880 * PARAMETERS :
11881 * @jpegMetadata : the extracted and/or modified jpeg metadata
11882 * @request : request with initial settings
11883 *
11884 * RETURN :
11885 * camera_metadata_t* : pointer to the saved request settings
11886 *==========================================================================*/
11887camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11888 const CameraMetadata &jpegMetadata,
11889 camera3_capture_request_t *request)
11890{
11891 camera_metadata_t *resultMetadata;
11892 CameraMetadata camMetadata;
11893 camMetadata = request->settings;
11894
11895 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11896 int32_t thumbnail_size[2];
11897 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11898 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11899 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11900 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11901 }
11902
11903 if (request->input_buffer != NULL) {
11904 uint8_t reprocessFlags = 1;
11905 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11906 (uint8_t*)&reprocessFlags,
11907 sizeof(reprocessFlags));
11908 }
11909
11910 resultMetadata = camMetadata.release();
11911 return resultMetadata;
11912}
11913
11914/*===========================================================================
11915 * FUNCTION : setHalFpsRange
11916 *
11917 * DESCRIPTION: set FPS range parameter
11918 *
11919 *
11920 * PARAMETERS :
11921 * @settings : Metadata from framework
11922 * @hal_metadata: Metadata buffer
11923 *
11924 *
11925 * RETURN : success: NO_ERROR
11926 * failure:
11927 *==========================================================================*/
11928int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11929 metadata_buffer_t *hal_metadata)
11930{
11931 int32_t rc = NO_ERROR;
11932 cam_fps_range_t fps_range;
11933 fps_range.min_fps = (float)
11934 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11935 fps_range.max_fps = (float)
11936 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11937 fps_range.video_min_fps = fps_range.min_fps;
11938 fps_range.video_max_fps = fps_range.max_fps;
11939
11940 LOGD("aeTargetFpsRange fps: [%f %f]",
11941 fps_range.min_fps, fps_range.max_fps);
11942 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11943 * follows:
11944 * ---------------------------------------------------------------|
11945 * Video stream is absent in configure_streams |
11946 * (Camcorder preview before the first video record |
11947 * ---------------------------------------------------------------|
11948 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11949 * | | | vid_min/max_fps|
11950 * ---------------------------------------------------------------|
11951 * NO | [ 30, 240] | 240 | [240, 240] |
11952 * |-------------|-------------|----------------|
11953 * | [240, 240] | 240 | [240, 240] |
11954 * ---------------------------------------------------------------|
11955 * Video stream is present in configure_streams |
11956 * ---------------------------------------------------------------|
11957 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11958 * | | | vid_min/max_fps|
11959 * ---------------------------------------------------------------|
11960 * NO | [ 30, 240] | 240 | [240, 240] |
11961 * (camcorder prev |-------------|-------------|----------------|
11962 * after video rec | [240, 240] | 240 | [240, 240] |
11963 * is stopped) | | | |
11964 * ---------------------------------------------------------------|
11965 * YES | [ 30, 240] | 240 | [240, 240] |
11966 * |-------------|-------------|----------------|
11967 * | [240, 240] | 240 | [240, 240] |
11968 * ---------------------------------------------------------------|
11969 * When Video stream is absent in configure_streams,
11970 * preview fps = sensor_fps / batchsize
11971 * Eg: for 240fps at batchSize 4, preview = 60fps
11972 * for 120fps at batchSize 4, preview = 30fps
11973 *
11974 * When video stream is present in configure_streams, preview fps is as per
11975 * the ratio of preview buffers to video buffers requested in process
11976 * capture request
11977 */
11978 mBatchSize = 0;
11979 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11980 fps_range.min_fps = fps_range.video_max_fps;
11981 fps_range.video_min_fps = fps_range.video_max_fps;
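        // In constrained high-speed mode the sensor is pinned to the top of the
        // requested range; preview pacing comes from the frame batching computed below.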
11982 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11983 fps_range.max_fps);
11984 if (NAME_NOT_FOUND != val) {
11985 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11987 return BAD_VALUE;
11988 }
11989
11990 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11991 /* If batchmode is currently in progress and the fps changes,
11992 * set the flag to restart the sensor */
11993 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11994 (mHFRVideoFps != fps_range.max_fps)) {
11995 mNeedSensorRestart = true;
11996 }
11997 mHFRVideoFps = fps_range.max_fps;
11998 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11999 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12000 mBatchSize = MAX_HFR_BATCH_SIZE;
12001 }
12002 }
12003 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12004
12005 }
12006 } else {
12007 /* HFR mode is session param in backend/ISP. This should be reset when
12008 * in non-HFR mode */
12009 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12011 return BAD_VALUE;
12012 }
12013 }
12014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12015 return BAD_VALUE;
12016 }
12017 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12018 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12019 return rc;
12020}
12021
12022/*===========================================================================
12023 * FUNCTION : translateToHalMetadata
12024 *
12025 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12026 *
12027 *
12028 * PARAMETERS :
12029 * @request : request sent from framework
12030 *
12031 *
12032 * RETURN : success: NO_ERROR
12033 * failure:
12034 *==========================================================================*/
12035int QCamera3HardwareInterface::translateToHalMetadata
12036 (const camera3_capture_request_t *request,
12037 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012038 uint32_t snapshotStreamId) {
12039 if (request == nullptr || hal_metadata == nullptr) {
12040 return BAD_VALUE;
12041 }
12042
12043 int64_t minFrameDuration = getMinFrameDuration(request);
12044
12045 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12046 minFrameDuration);
12047}
12048
12049int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12050 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12051 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12052
Thierry Strudel3d639192016-09-09 11:52:26 -070012053 int rc = 0;
12054 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012055 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012056
12057 /* Do not change the order of the following list unless you know what you are
12058 * doing.
12059 * The order is laid out in such a way that parameters in the front of the table
12060 * may be used to override the parameters later in the table. Examples are:
12061 * 1. META_MODE should precede AEC/AWB/AF MODE
12062     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12063     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12064     * 4. Any mode should precede its corresponding settings
12065 */
12066 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12067 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12068 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12069 rc = BAD_VALUE;
12070 }
12071 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12072 if (rc != NO_ERROR) {
12073 LOGE("extractSceneMode failed");
12074 }
12075 }
12076
12077 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12078 uint8_t fwk_aeMode =
12079 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12080 uint8_t aeMode;
12081 int32_t redeye;
12082
12083 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12084 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012085 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12086 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012087 } else {
12088 aeMode = CAM_AE_MODE_ON;
12089 }
12090 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12091 redeye = 1;
12092 } else {
12093 redeye = 0;
12094 }
12095
12096 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12097 fwk_aeMode);
12098 if (NAME_NOT_FOUND != val) {
12099 int32_t flashMode = (int32_t)val;
12100 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12101 }
12102
12103 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12105 rc = BAD_VALUE;
12106 }
12107 }
12108
12109 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12110 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12111 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12112 fwk_whiteLevel);
12113 if (NAME_NOT_FOUND != val) {
12114 uint8_t whiteLevel = (uint8_t)val;
12115 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12116 rc = BAD_VALUE;
12117 }
12118 }
12119 }
12120
12121 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12122 uint8_t fwk_cacMode =
12123 frame_settings.find(
12124 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12125 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12126 fwk_cacMode);
12127 if (NAME_NOT_FOUND != val) {
12128 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12129 bool entryAvailable = FALSE;
12130 // Check whether Frameworks set CAC mode is supported in device or not
12131 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12132 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12133 entryAvailable = TRUE;
12134 break;
12135 }
12136 }
12137 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12138        // If entry not found then set the device-supported mode instead of the framework's mode, i.e.,
12139 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12140 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12141 if (entryAvailable == FALSE) {
12142 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12143 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12144 } else {
12145 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12146                    // High is not supported, so set FAST since the spec says the underlying
12147                    // device implementation can be the same for both modes.
12148 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12149 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12150 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12151 // in order to avoid the fps drop due to high quality
12152 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12153 } else {
12154 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12155 }
12156 }
12157 }
12158 LOGD("Final cacMode is %d", cacMode);
12159 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12160 rc = BAD_VALUE;
12161 }
12162 } else {
12163 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12164 }
12165 }
12166
Jason Lee84ae9972017-02-24 13:24:24 -080012167 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012168 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012169 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012170 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012171 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12172 fwk_focusMode);
12173 if (NAME_NOT_FOUND != val) {
12174 uint8_t focusMode = (uint8_t)val;
12175 LOGD("set focus mode %d", focusMode);
12176 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12177 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12178 rc = BAD_VALUE;
12179 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012180 }
12181 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012182 } else {
12183 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12184 LOGE("Focus forced to infinity %d", focusMode);
12185 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12186 rc = BAD_VALUE;
12187 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012188 }
12189
Jason Lee84ae9972017-02-24 13:24:24 -080012190 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12191 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012192 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12194 focalDistance)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198
12199 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12200 uint8_t fwk_antibandingMode =
12201 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12202 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12203 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12204 if (NAME_NOT_FOUND != val) {
12205 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012206 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
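                // m60HzZone presumably reflects the device's mains-frequency region;
                // narrow AUTO to the matching 50/60 Hz auto mode so flicker detection
                // starts from the right hint.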
12207 if (m60HzZone) {
12208 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12209 } else {
12210 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12211 }
12212 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012213 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12214 hal_antibandingMode)) {
12215 rc = BAD_VALUE;
12216 }
12217 }
12218 }
12219
12220 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12221 int32_t expCompensation = frame_settings.find(
12222 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12223 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12224 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12225 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12226 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012227 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12229 expCompensation)) {
12230 rc = BAD_VALUE;
12231 }
12232 }
12233
12234 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12235 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12236 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12237 rc = BAD_VALUE;
12238 }
12239 }
12240 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12241 rc = setHalFpsRange(frame_settings, hal_metadata);
12242 if (rc != NO_ERROR) {
12243 LOGE("setHalFpsRange failed");
12244 }
12245 }
12246
12247 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12248 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12250 rc = BAD_VALUE;
12251 }
12252 }
12253
12254 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12255 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12256 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12257 fwk_effectMode);
12258 if (NAME_NOT_FOUND != val) {
12259 uint8_t effectMode = (uint8_t)val;
12260 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12261 rc = BAD_VALUE;
12262 }
12263 }
12264 }
12265
12266 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12267 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12269 colorCorrectMode)) {
12270 rc = BAD_VALUE;
12271 }
12272 }
12273
12274 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12275 cam_color_correct_gains_t colorCorrectGains;
12276 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12277 colorCorrectGains.gains[i] =
12278 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12279 }
12280 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12281 colorCorrectGains)) {
12282 rc = BAD_VALUE;
12283 }
12284 }
12285
12286 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12287 cam_color_correct_matrix_t colorCorrectTransform;
12288 cam_rational_type_t transform_elem;
12289 size_t num = 0;
12290 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12291 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12292 transform_elem.numerator =
12293 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12294 transform_elem.denominator =
12295 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12296 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12297 num++;
12298 }
12299 }
12300 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12301 colorCorrectTransform)) {
12302 rc = BAD_VALUE;
12303 }
12304 }
12305
12306 cam_trigger_t aecTrigger;
12307 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12308 aecTrigger.trigger_id = -1;
12309 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12310 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12311 aecTrigger.trigger =
12312 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12313 aecTrigger.trigger_id =
12314 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12315 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12316 aecTrigger)) {
12317 rc = BAD_VALUE;
12318 }
12319 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12320 aecTrigger.trigger, aecTrigger.trigger_id);
12321 }
12322
12323 /*af_trigger must come with a trigger id*/
12324 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12325 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12326 cam_trigger_t af_trigger;
12327 af_trigger.trigger =
12328 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12329 af_trigger.trigger_id =
12330 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12331 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12332 rc = BAD_VALUE;
12333 }
12334 LOGD("AfTrigger: %d AfTriggerID: %d",
12335 af_trigger.trigger, af_trigger.trigger_id);
12336 }
12337
12338 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12339 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12340 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12341 rc = BAD_VALUE;
12342 }
12343 }
12344 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12345 cam_edge_application_t edge_application;
12346 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012347
Thierry Strudel3d639192016-09-09 11:52:26 -070012348 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12349 edge_application.sharpness = 0;
12350 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012351 edge_application.sharpness =
12352 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12353 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12354 int32_t sharpness =
12355 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12356 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12357 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12358 LOGD("Setting edge mode sharpness %d", sharpness);
12359 edge_application.sharpness = sharpness;
12360 }
12361 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012362 }
12363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12364 rc = BAD_VALUE;
12365 }
12366 }
12367
12368 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12369 int32_t respectFlashMode = 1;
12370 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12371 uint8_t fwk_aeMode =
12372 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012373 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12374 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12375 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012376 respectFlashMode = 0;
12377 LOGH("AE Mode controls flash, ignore android.flash.mode");
12378 }
12379 }
12380 if (respectFlashMode) {
12381 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12382 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12383 LOGH("flash mode after mapping %d", val);
12384 // To check: CAM_INTF_META_FLASH_MODE usage
12385 if (NAME_NOT_FOUND != val) {
12386 uint8_t flashMode = (uint8_t)val;
12387 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12388 rc = BAD_VALUE;
12389 }
12390 }
12391 }
12392 }
12393
12394 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12395 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12396 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12397 rc = BAD_VALUE;
12398 }
12399 }
12400
12401 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12402 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12403 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12404 flashFiringTime)) {
12405 rc = BAD_VALUE;
12406 }
12407 }
12408
12409 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12410 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12412 hotPixelMode)) {
12413 rc = BAD_VALUE;
12414 }
12415 }
12416
12417 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12418 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12419 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12420 lensAperture)) {
12421 rc = BAD_VALUE;
12422 }
12423 }
12424
12425 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12426 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12427 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12428 filterDensity)) {
12429 rc = BAD_VALUE;
12430 }
12431 }
12432
12433 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12434 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12436 focalLength)) {
12437 rc = BAD_VALUE;
12438 }
12439 }
12440
12441 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12442 uint8_t optStabMode =
12443 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12444 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12445 optStabMode)) {
12446 rc = BAD_VALUE;
12447 }
12448 }
12449
12450 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12451 uint8_t videoStabMode =
12452 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12453 LOGD("videoStabMode from APP = %d", videoStabMode);
12454 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12455 videoStabMode)) {
12456 rc = BAD_VALUE;
12457 }
12458 }
12459
12460
12461 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12462 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12463 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12464 noiseRedMode)) {
12465 rc = BAD_VALUE;
12466 }
12467 }
12468
12469 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12470 float reprocessEffectiveExposureFactor =
12471 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12472 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12473 reprocessEffectiveExposureFactor)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 cam_crop_region_t scalerCropRegion;
12479 bool scalerCropSet = false;
12480 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12481 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12482 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12483 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12484 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12485
12486 // Map coordinate system from active array to sensor output.
12487 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12488 scalerCropRegion.width, scalerCropRegion.height);
12489
12490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12491 scalerCropRegion)) {
12492 rc = BAD_VALUE;
12493 }
12494 scalerCropSet = true;
12495 }
12496
12497 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12498 int64_t sensorExpTime =
12499 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12500 LOGD("setting sensorExpTime %lld", sensorExpTime);
12501 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12502 sensorExpTime)) {
12503 rc = BAD_VALUE;
12504 }
12505 }
12506
12507 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12508 int64_t sensorFrameDuration =
12509 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012510 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12511 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12512 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12513 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12514 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12515 sensorFrameDuration)) {
12516 rc = BAD_VALUE;
12517 }
12518 }
12519
12520 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12521 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12522 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12523 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12524 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12525 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12526 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12527 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12528 sensorSensitivity)) {
12529 rc = BAD_VALUE;
12530 }
12531 }
12532
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012533#ifndef USE_HAL_3_3
12534 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12535 int32_t ispSensitivity =
12536 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12537 if (ispSensitivity <
12538 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12539 ispSensitivity =
12540 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12541 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12542 }
12543 if (ispSensitivity >
12544 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12545 ispSensitivity =
12546 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12547 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12548 }
12549 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12550 ispSensitivity)) {
12551 rc = BAD_VALUE;
12552 }
12553 }
12554#endif
12555
Thierry Strudel3d639192016-09-09 11:52:26 -070012556 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12557 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12559 rc = BAD_VALUE;
12560 }
12561 }
12562
12563 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12564 uint8_t fwk_facedetectMode =
12565 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12566
12567 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12568 fwk_facedetectMode);
12569
12570 if (NAME_NOT_FOUND != val) {
12571 uint8_t facedetectMode = (uint8_t)val;
12572 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12573 facedetectMode)) {
12574 rc = BAD_VALUE;
12575 }
12576 }
12577 }
12578
Thierry Strudel54dc9782017-02-15 12:12:10 -080012579 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012580 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012581 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012582 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12583 histogramMode)) {
12584 rc = BAD_VALUE;
12585 }
12586 }
12587
12588 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12589 uint8_t sharpnessMapMode =
12590 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12591 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12592 sharpnessMapMode)) {
12593 rc = BAD_VALUE;
12594 }
12595 }
12596
12597 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12598 uint8_t tonemapMode =
12599 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12600 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12601 rc = BAD_VALUE;
12602 }
12603 }
12604 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12605 /*All tonemap channels will have the same number of points*/
12606 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12607 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12608 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12609 cam_rgb_tonemap_curves tonemapCurves;
12610 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
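        // Each framework tonemap curve is a flat float array of (Pin, Pout) pairs,
        // hence count / 2 control points per channel.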
12611 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12612 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12613 tonemapCurves.tonemap_points_cnt,
12614 CAM_MAX_TONEMAP_CURVE_SIZE);
12615 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12616 }
12617
12618 /* ch0 = G*/
12619 size_t point = 0;
12620 cam_tonemap_curve_t tonemapCurveGreen;
12621 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12622 for (size_t j = 0; j < 2; j++) {
12623 tonemapCurveGreen.tonemap_points[i][j] =
12624 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12625 point++;
12626 }
12627 }
12628 tonemapCurves.curves[0] = tonemapCurveGreen;
12629
12630 /* ch 1 = B */
12631 point = 0;
12632 cam_tonemap_curve_t tonemapCurveBlue;
12633 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12634 for (size_t j = 0; j < 2; j++) {
12635 tonemapCurveBlue.tonemap_points[i][j] =
12636 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12637 point++;
12638 }
12639 }
12640 tonemapCurves.curves[1] = tonemapCurveBlue;
12641
12642 /* ch 2 = R */
12643 point = 0;
12644 cam_tonemap_curve_t tonemapCurveRed;
12645 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12646 for (size_t j = 0; j < 2; j++) {
12647 tonemapCurveRed.tonemap_points[i][j] =
12648 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12649 point++;
12650 }
12651 }
12652 tonemapCurves.curves[2] = tonemapCurveRed;
12653
12654 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12655 tonemapCurves)) {
12656 rc = BAD_VALUE;
12657 }
12658 }
12659
12660 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12661 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12662 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12663 captureIntent)) {
12664 rc = BAD_VALUE;
12665 }
12666 }
12667
12668 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12669 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12670 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12671 blackLevelLock)) {
12672 rc = BAD_VALUE;
12673 }
12674 }
12675
12676 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12677 uint8_t lensShadingMapMode =
12678 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12680 lensShadingMapMode)) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684
12685 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12686 cam_area_t roi;
12687 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012688 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012689
12690 // Map coordinate system from active array to sensor output.
12691 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12692 roi.rect.height);
12693
12694 if (scalerCropSet) {
12695 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12696 }
12697 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12698 rc = BAD_VALUE;
12699 }
12700 }
12701
12702 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12703 cam_area_t roi;
12704 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012705 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012706
12707 // Map coordinate system from active array to sensor output.
12708 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12709 roi.rect.height);
12710
12711 if (scalerCropSet) {
12712 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12713 }
12714 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12715 rc = BAD_VALUE;
12716 }
12717 }
12718
12719 // CDS for non-HFR non-video mode
12720 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12721 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12722 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12723 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12724 LOGE("Invalid CDS mode %d!", *fwk_cds);
12725 } else {
12726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12727 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12728 rc = BAD_VALUE;
12729 }
12730 }
12731 }
12732
Thierry Strudel04e026f2016-10-10 11:27:36 -070012733 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012734 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012735 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012736 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12737 }
12738 if (m_bVideoHdrEnabled)
12739 vhdr = CAM_VIDEO_HDR_MODE_ON;
12740
Thierry Strudel54dc9782017-02-15 12:12:10 -080012741 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12742
12743 if(vhdr != curr_hdr_state)
12744 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12745
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012746 rc = setVideoHdrMode(mParameters, vhdr);
12747 if (rc != NO_ERROR) {
12748 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012749 }
12750
12751 //IR
12752 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12753 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12754 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012755 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12756 uint8_t isIRon = 0;
12757
12758        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012759 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12760 LOGE("Invalid IR mode %d!", fwk_ir);
12761 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012762 if(isIRon != curr_ir_state )
12763 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12764
Thierry Strudel04e026f2016-10-10 11:27:36 -070012765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12766 CAM_INTF_META_IR_MODE, fwk_ir)) {
12767 rc = BAD_VALUE;
12768 }
12769 }
12770 }
12771
Thierry Strudel54dc9782017-02-15 12:12:10 -080012772 //Binning Correction Mode
12773 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12774 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12775 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12776 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12777 || (0 > fwk_binning_correction)) {
12778 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12779 } else {
12780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12781 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12782 rc = BAD_VALUE;
12783 }
12784 }
12785 }
12786
Thierry Strudel269c81a2016-10-12 12:13:59 -070012787 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12788 float aec_speed;
12789 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12790 LOGD("AEC Speed :%f", aec_speed);
12791 if ( aec_speed < 0 ) {
12792            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12793 } else {
12794 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12795 aec_speed)) {
12796 rc = BAD_VALUE;
12797 }
12798 }
12799 }
12800
12801 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12802 float awb_speed;
12803 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12804 LOGD("AWB Speed :%f", awb_speed);
12805 if ( awb_speed < 0 ) {
12806            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12807 } else {
12808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12809 awb_speed)) {
12810 rc = BAD_VALUE;
12811 }
12812 }
12813 }
12814
Thierry Strudel3d639192016-09-09 11:52:26 -070012815 // TNR
12816 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12817 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12818 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012819 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012820 cam_denoise_param_t tnr;
12821 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12822 tnr.process_plates =
12823 (cam_denoise_process_type_t)frame_settings.find(
12824 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12825 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012826
12827 if(b_TnrRequested != curr_tnr_state)
12828 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12829
Thierry Strudel3d639192016-09-09 11:52:26 -070012830 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12831 rc = BAD_VALUE;
12832 }
12833 }
12834
Thierry Strudel54dc9782017-02-15 12:12:10 -080012835 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012836 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012837 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012838 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12839 *exposure_metering_mode)) {
12840 rc = BAD_VALUE;
12841 }
12842 }
12843
Thierry Strudel3d639192016-09-09 11:52:26 -070012844 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12845 int32_t fwk_testPatternMode =
12846 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12847 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12848 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12849
12850 if (NAME_NOT_FOUND != testPatternMode) {
12851 cam_test_pattern_data_t testPatternData;
12852 memset(&testPatternData, 0, sizeof(testPatternData));
12853 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12854 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12855 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12856 int32_t *fwk_testPatternData =
12857 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12858 testPatternData.r = fwk_testPatternData[0];
12859 testPatternData.b = fwk_testPatternData[3];
12860 switch (gCamCapability[mCameraId]->color_arrangement) {
12861 case CAM_FILTER_ARRANGEMENT_RGGB:
12862 case CAM_FILTER_ARRANGEMENT_GRBG:
12863 testPatternData.gr = fwk_testPatternData[1];
12864 testPatternData.gb = fwk_testPatternData[2];
12865 break;
12866 case CAM_FILTER_ARRANGEMENT_GBRG:
12867 case CAM_FILTER_ARRANGEMENT_BGGR:
12868 testPatternData.gr = fwk_testPatternData[2];
12869 testPatternData.gb = fwk_testPatternData[1];
12870 break;
12871 default:
12872 LOGE("color arrangement %d is not supported",
12873 gCamCapability[mCameraId]->color_arrangement);
12874 break;
12875 }
12876 }
12877 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12878 testPatternData)) {
12879 rc = BAD_VALUE;
12880 }
12881 } else {
12882 LOGE("Invalid framework sensor test pattern mode %d",
12883 fwk_testPatternMode);
12884 }
12885 }
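    /* Worked example (illustrative, not from the original source): for a BGGR
     * sensor with ANDROID_SENSOR_TEST_PATTERN_DATA = [R, Geven, Godd, B]
     * = [255, 128, 64, 0], the block above programs r=255, gr=64, gb=128, b=0,
     * i.e. the two green samples are swapped relative to the RGGB/GRBG case. */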
12886
12887 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12888 size_t count = 0;
12889 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12890 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12891 gps_coords.data.d, gps_coords.count, count);
12892 if (gps_coords.count != count) {
12893 rc = BAD_VALUE;
12894 }
12895 }
12896
12897 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12898 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12899 size_t count = 0;
12900 const char *gps_methods_src = (const char *)
12901 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12902 memset(gps_methods, '\0', sizeof(gps_methods));
12903 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12904 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12905 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12906 if (GPS_PROCESSING_METHOD_SIZE != count) {
12907 rc = BAD_VALUE;
12908 }
12909 }
12910
12911 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12912 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12913 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12914 gps_timestamp)) {
12915 rc = BAD_VALUE;
12916 }
12917 }
12918
12919 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12920 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12921 cam_rotation_info_t rotation_info;
12922 if (orientation == 0) {
12923 rotation_info.rotation = ROTATE_0;
12924 } else if (orientation == 90) {
12925 rotation_info.rotation = ROTATE_90;
12926 } else if (orientation == 180) {
12927 rotation_info.rotation = ROTATE_180;
12928 } else if (orientation == 270) {
12929 rotation_info.rotation = ROTATE_270;
12930 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012931 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012932 rotation_info.streamId = snapshotStreamId;
12933 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12934 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12935 rc = BAD_VALUE;
12936 }
12937 }
12938
12939 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12940 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12941 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12942 rc = BAD_VALUE;
12943 }
12944 }
12945
12946 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12947 uint32_t thumb_quality = (uint32_t)
12948 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12949 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12950 thumb_quality)) {
12951 rc = BAD_VALUE;
12952 }
12953 }
12954
12955 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12956 cam_dimension_t dim;
12957 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12958 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12959 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12960 rc = BAD_VALUE;
12961 }
12962 }
12963
12964 // Internal metadata
12965 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12966 size_t count = 0;
12967 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12968 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12969 privatedata.data.i32, privatedata.count, count);
12970 if (privatedata.count != count) {
12971 rc = BAD_VALUE;
12972 }
12973 }
12974
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012975 // ISO/Exposure Priority
12976 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12977 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12978 cam_priority_mode_t mode =
12979 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12980 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12981 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12982 use_iso_exp_pty.previewOnly = FALSE;
12983 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12984 use_iso_exp_pty.value = *ptr;
12985
12986 if(CAM_ISO_PRIORITY == mode) {
12987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12988 use_iso_exp_pty)) {
12989 rc = BAD_VALUE;
12990 }
12991 }
12992 else {
12993 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12994 use_iso_exp_pty)) {
12995 rc = BAD_VALUE;
12996 }
12997 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012998
12999 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13000 rc = BAD_VALUE;
13001 }
13002 }
13003 } else {
13004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13005 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013006 }
13007 }
13008
13009 // Saturation
13010 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13011 int32_t* use_saturation =
13012 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13014 rc = BAD_VALUE;
13015 }
13016 }
13017
Thierry Strudel3d639192016-09-09 11:52:26 -070013018 // EV step
13019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13020 gCamCapability[mCameraId]->exp_compensation_step)) {
13021 rc = BAD_VALUE;
13022 }
13023
13024 // CDS info
13025 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13026 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13027 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13028
13029 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13030 CAM_INTF_META_CDS_DATA, *cdsData)) {
13031 rc = BAD_VALUE;
13032 }
13033 }
13034
Shuzhen Wang19463d72016-03-08 11:09:52 -080013035 // Hybrid AE
13036 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13037 uint8_t *hybrid_ae = (uint8_t *)
13038 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13039
13040 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13041 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13042 rc = BAD_VALUE;
13043 }
13044 }
13045
Shuzhen Wang14415f52016-11-16 18:26:18 -080013046 // Histogram
13047 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13048 uint8_t histogramMode =
13049 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13050 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13051 histogramMode)) {
13052 rc = BAD_VALUE;
13053 }
13054 }
13055
13056 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13057 int32_t histogramBins =
13058 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13059 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13060 histogramBins)) {
13061 rc = BAD_VALUE;
13062 }
13063 }
13064
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013065 // Tracking AF
13066 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13067 uint8_t trackingAfTrigger =
13068 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13069 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13070 trackingAfTrigger)) {
13071 rc = BAD_VALUE;
13072 }
13073 }
13074
Thierry Strudel3d639192016-09-09 11:52:26 -070013075 return rc;
13076}
13077
13078/*===========================================================================
13079 * FUNCTION : captureResultCb
13080 *
13081 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13082 *
13083 * PARAMETERS :
13084 * @frame : frame information from mm-camera-interface
13085 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13086 * @userdata: userdata
13087 *
13088 * RETURN : NONE
13089 *==========================================================================*/
13090void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13091 camera3_stream_buffer_t *buffer,
13092 uint32_t frame_number, bool isInputBuffer, void *userdata)
13093{
13094 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13095 if (hw == NULL) {
13096 LOGE("Invalid hw %p", hw);
13097 return;
13098 }
13099
13100 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13101 return;
13102}
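/* Usage sketch (illustrative, compiled out with #if 0): channels created by this
 * HAL are handed the static trampolines above/below plus `this` as userdata, so
 * these C-style callbacks can recover the QCamera3HardwareInterface instance.
 * The call below simply mirrors the construction done in
 * addOfflineReprocChannel() later in this file; it adds no new code path. */
#if 0
QCamera3ReprocessChannel *pChannel = new QCamera3ReprocessChannel(
        mCameraHandle->camera_handle, mChannelHandle, mCameraHandle->ops,
        captureResultCb,        /* static trampoline defined above */
        setBufferErrorStatus,   /* buffer-error trampoline defined below */
        config.padding, CAM_QCOM_FEATURE_NONE, this /* userdata */, inputChHandle);
#endif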
13103
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013104/*===========================================================================
13105 * FUNCTION : setBufferErrorStatus
13106 *
13107 * DESCRIPTION: Callback handler for channels to report any buffer errors
13108 *
13109 * PARAMETERS :
13110 * @ch : Channel on which buffer error is reported from
13111 * @frame_number : frame number on which buffer error is reported on
13112 * @buffer_status : buffer error status
13113 * @userdata: userdata
13114 *
13115 * RETURN : NONE
13116 *==========================================================================*/
13117void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13118 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13119{
13120 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13121 if (hw == NULL) {
13122 LOGE("Invalid hw %p", hw);
13123 return;
13124 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013125
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013126 hw->setBufferErrorStatus(ch, frame_number, err);
13127 return;
13128}
13129
13130void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13131 uint32_t frameNumber, camera3_buffer_status_t err)
13132{
13133 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13134 pthread_mutex_lock(&mMutex);
13135
13136 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13137 if (req.frame_number != frameNumber)
13138 continue;
13139 for (auto& k : req.mPendingBufferList) {
13140 if(k.stream->priv == ch) {
13141 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13142 }
13143 }
13144 }
13145
13146 pthread_mutex_unlock(&mMutex);
13147 return;
13148}
Thierry Strudel3d639192016-09-09 11:52:26 -070013149/*===========================================================================
13150 * FUNCTION : initialize
13151 *
13152 * DESCRIPTION: Pass framework callback pointers to HAL
13153 *
13154 * PARAMETERS :
13155 *
13156 *
13157 * RETURN : Success : 0
13158 * Failure: -ENODEV
13159 *==========================================================================*/
13160
13161int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13162 const camera3_callback_ops_t *callback_ops)
13163{
13164 LOGD("E");
13165 QCamera3HardwareInterface *hw =
13166 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13167 if (!hw) {
13168 LOGE("NULL camera device");
13169 return -ENODEV;
13170 }
13171
13172 int rc = hw->initialize(callback_ops);
13173 LOGD("X");
13174 return rc;
13175}
13176
13177/*===========================================================================
13178 * FUNCTION : configure_streams
13179 *
13180 * DESCRIPTION:
13181 *
13182 * PARAMETERS :
13183 *
13184 *
13185 * RETURN : Success: 0
13186 * Failure: -EINVAL (if stream configuration is invalid)
13187 * -ENODEV (fatal error)
13188 *==========================================================================*/
13189
13190int QCamera3HardwareInterface::configure_streams(
13191 const struct camera3_device *device,
13192 camera3_stream_configuration_t *stream_list)
13193{
13194 LOGD("E");
13195 QCamera3HardwareInterface *hw =
13196 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13197 if (!hw) {
13198 LOGE("NULL camera device");
13199 return -ENODEV;
13200 }
13201 int rc = hw->configureStreams(stream_list);
13202 LOGD("X");
13203 return rc;
13204}
13205
13206/*===========================================================================
13207 * FUNCTION : construct_default_request_settings
13208 *
13209 * DESCRIPTION: Configure a settings buffer to meet the required use case
13210 *
13211 * PARAMETERS :
13212 *
13213 *
13214 * RETURN : Success: Return valid metadata
13215 * Failure: Return NULL
13216 *==========================================================================*/
13217const camera_metadata_t* QCamera3HardwareInterface::
13218 construct_default_request_settings(const struct camera3_device *device,
13219 int type)
13220{
13221
13222 LOGD("E");
13223 camera_metadata_t* fwk_metadata = NULL;
13224 QCamera3HardwareInterface *hw =
13225 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13226 if (!hw) {
13227 LOGE("NULL camera device");
13228 return NULL;
13229 }
13230
13231 fwk_metadata = hw->translateCapabilityToMetadata(type);
13232
13233 LOGD("X");
13234 return fwk_metadata;
13235}
13236
13237/*===========================================================================
13238 * FUNCTION : process_capture_request
13239 *
13240 * DESCRIPTION:
13241 *
13242 * PARAMETERS :
13243 *
13244 *
13245 * RETURN :
13246 *==========================================================================*/
13247int QCamera3HardwareInterface::process_capture_request(
13248 const struct camera3_device *device,
13249 camera3_capture_request_t *request)
13250{
13251 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013252 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013253 QCamera3HardwareInterface *hw =
13254 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13255 if (!hw) {
13256 LOGE("NULL camera device");
13257 return -EINVAL;
13258 }
13259
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013260 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013261 LOGD("X");
13262 return rc;
13263}
13264
13265/*===========================================================================
13266 * FUNCTION : dump
13267 *
13268 * DESCRIPTION:
13269 *
13270 * PARAMETERS :
13271 *
13272 *
13273 * RETURN :
13274 *==========================================================================*/
13275
13276void QCamera3HardwareInterface::dump(
13277 const struct camera3_device *device, int fd)
13278{
13279 /* Log level property is read when "adb shell dumpsys media.camera" is
13280 called so that the log level can be controlled without restarting
13281 the media server */
13282 getLogLevel();
13283
13284 LOGD("E");
13285 QCamera3HardwareInterface *hw =
13286 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13287 if (!hw) {
13288 LOGE("NULL camera device");
13289 return;
13290 }
13291
13292 hw->dump(fd);
13293 LOGD("X");
13294 return;
13295}
13296
13297/*===========================================================================
13298 * FUNCTION : flush
13299 *
13300 * DESCRIPTION:
13301 *
13302 * PARAMETERS :
13303 *
13304 *
13305 * RETURN :
13306 *==========================================================================*/
13307
13308int QCamera3HardwareInterface::flush(
13309 const struct camera3_device *device)
13310{
13311 int rc;
13312 LOGD("E");
13313 QCamera3HardwareInterface *hw =
13314 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13315 if (!hw) {
13316 LOGE("NULL camera device");
13317 return -EINVAL;
13318 }
13319
13320 pthread_mutex_lock(&hw->mMutex);
13321 // Validate current state
13322 switch (hw->mState) {
13323 case STARTED:
13324 /* valid state */
13325 break;
13326
13327 case ERROR:
13328 pthread_mutex_unlock(&hw->mMutex);
13329 hw->handleCameraDeviceError();
13330 return -ENODEV;
13331
13332 default:
13333 LOGI("Flush returned during state %d", hw->mState);
13334 pthread_mutex_unlock(&hw->mMutex);
13335 return 0;
13336 }
13337 pthread_mutex_unlock(&hw->mMutex);
13338
13339 rc = hw->flush(true /* restart channels */ );
13340 LOGD("X");
13341 return rc;
13342}
13343
13344/*===========================================================================
13345 * FUNCTION : close_camera_device
13346 *
13347 * DESCRIPTION:
13348 *
13349 * PARAMETERS :
13350 *
13351 *
13352 * RETURN :
13353 *==========================================================================*/
13354int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13355{
13356 int ret = NO_ERROR;
13357 QCamera3HardwareInterface *hw =
13358 reinterpret_cast<QCamera3HardwareInterface *>(
13359 reinterpret_cast<camera3_device_t *>(device)->priv);
13360 if (!hw) {
13361 LOGE("NULL camera device");
13362 return BAD_VALUE;
13363 }
13364
13365 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13366 delete hw;
13367 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013368 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013369 return ret;
13370}
13371
13372/*===========================================================================
13373 * FUNCTION : getWaveletDenoiseProcessPlate
13374 *
13375 * DESCRIPTION: query wavelet denoise process plate
13376 *
13377 * PARAMETERS : None
13378 *
13379 * RETURN : WNR process plate value
13380 *==========================================================================*/
13381cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13382{
13383 char prop[PROPERTY_VALUE_MAX];
13384 memset(prop, 0, sizeof(prop));
13385 property_get("persist.denoise.process.plates", prop, "0");
13386 int processPlate = atoi(prop);
13387 switch(processPlate) {
13388 case 0:
13389 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13390 case 1:
13391 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13392 case 2:
13393 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13394 case 3:
13395 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13396 default:
13397 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13398 }
13399}
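/* Example (illustrative): selecting the WNR process plate via the property read
 * above. Values follow the switch in getWaveletDenoiseProcessPlate(); anything
 * outside 0-3 falls back to the streamlined YCbCr plate.
 *
 *   adb shell setprop persist.denoise.process.plates 2
 *   // the next call returns CAM_WAVELET_DENOISE_STREAMLINE_YCBCR
 */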
13400
13401
13402/*===========================================================================
13403 * FUNCTION : getTemporalDenoiseProcessPlate
13404 *
13405 * DESCRIPTION: query temporal denoise process plate
13406 *
13407 * PARAMETERS : None
13408 *
13409 * RETURN : TNR process plate value
13410 *==========================================================================*/
13411cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13412{
13413 char prop[PROPERTY_VALUE_MAX];
13414 memset(prop, 0, sizeof(prop));
13415 property_get("persist.tnr.process.plates", prop, "0");
13416 int processPlate = atoi(prop);
13417 switch(processPlate) {
13418 case 0:
13419 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13420 case 1:
13421 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13422 case 2:
13423 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13424 case 3:
13425 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13426 default:
13427 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13428 }
13429}
13430
13431
13432/*===========================================================================
13433 * FUNCTION : extractSceneMode
13434 *
13435 * DESCRIPTION: Extract scene mode from frameworks set metadata
13436 *
13437 * PARAMETERS :
13438 * @frame_settings: CameraMetadata reference
13439 * @metaMode: ANDROID_CONTROL_MODE
13440 * @hal_metadata: hal metadata structure
13441 *
13442 * RETURN : NO_ERROR on success, error code on failure
13443 *==========================================================================*/
13444int32_t QCamera3HardwareInterface::extractSceneMode(
13445 const CameraMetadata &frame_settings, uint8_t metaMode,
13446 metadata_buffer_t *hal_metadata)
13447{
13448 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013449 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13450
13451 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13452 LOGD("Ignoring control mode OFF_KEEP_STATE");
13453 return NO_ERROR;
13454 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013455
13456 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13457 camera_metadata_ro_entry entry =
13458 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13459 if (0 == entry.count)
13460 return rc;
13461
13462 uint8_t fwk_sceneMode = entry.data.u8[0];
13463
13464 int val = lookupHalName(SCENE_MODES_MAP,
13465 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13466 fwk_sceneMode);
13467 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013468 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013469 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013470 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013471 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013472
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013473 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13474 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13475 }
13476
13477 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13478 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013479 cam_hdr_param_t hdr_params;
13480 hdr_params.hdr_enable = 1;
13481 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13482 hdr_params.hdr_need_1x = false;
13483 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13484 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13485 rc = BAD_VALUE;
13486 }
13487 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013488
Thierry Strudel3d639192016-09-09 11:52:26 -070013489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13490 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13491 rc = BAD_VALUE;
13492 }
13493 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013494
13495 if (mForceHdrSnapshot) {
13496 cam_hdr_param_t hdr_params;
13497 hdr_params.hdr_enable = 1;
13498 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13499 hdr_params.hdr_need_1x = false;
13500 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13501 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13502 rc = BAD_VALUE;
13503 }
13504 }
13505
Thierry Strudel3d639192016-09-09 11:52:26 -070013506 return rc;
13507}
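/* Example (illustrative): a request carrying ANDROID_CONTROL_MODE_USE_SCENE_MODE
 * together with a scene mode present in SCENE_MODES_MAP is translated by
 * lookupHalName() into the corresponding CAM_SCENE_MODE_* value and, unless
 * sensor HDR is already enabled, written to CAM_INTF_PARM_BESTSHOT_MODE above.
 * A request with ANDROID_CONTROL_MODE_OFF_KEEP_STATE is ignored entirely. */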
13508
13509/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013510 * FUNCTION : setVideoHdrMode
13511 *
13512 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13513 *
13514 * PARAMETERS :
13515 * @hal_metadata: hal metadata structure
13516 * @vhdr: video HDR mode (QCAMERA3_VIDEO_HDR_MODE) requested by the framework
13517 *
13518 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13519 *==========================================================================*/
13520int32_t QCamera3HardwareInterface::setVideoHdrMode(
13521 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13522{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013523 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13524 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13525 }
13526
13527 LOGE("Invalid Video HDR mode %d!", vhdr);
13528 return BAD_VALUE;
13529}
13530
13531/*===========================================================================
13532 * FUNCTION : setSensorHDR
13533 *
13534 * DESCRIPTION: Enable/disable sensor HDR.
13535 *
13536 * PARAMETERS :
13537 * @hal_metadata: hal metadata structure
13538 * @enable: boolean whether to enable/disable sensor HDR
 * @isVideoHdrEnable: true when called from setVideoHdrMode() for video HDR
13539 *
13540 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13541 *==========================================================================*/
13542int32_t QCamera3HardwareInterface::setSensorHDR(
13543 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13544{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013545 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013546 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13547
13548 if (enable) {
13549 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13550 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13551 #ifdef _LE_CAMERA_
13552 //Default to staggered HDR for IOT
13553 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13554 #else
13555 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13556 #endif
13557 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13558 }
13559
13560 bool isSupported = false;
13561 switch (sensor_hdr) {
13562 case CAM_SENSOR_HDR_IN_SENSOR:
13563 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13564 CAM_QCOM_FEATURE_SENSOR_HDR) {
13565 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013566 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013567 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013568 break;
13569 case CAM_SENSOR_HDR_ZIGZAG:
13570 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13571 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13572 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013573 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013574 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013575 break;
13576 case CAM_SENSOR_HDR_STAGGERED:
13577 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13578 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13579 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013580 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013581 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013582 break;
13583 case CAM_SENSOR_HDR_OFF:
13584 isSupported = true;
13585 LOGD("Turning off sensor HDR");
13586 break;
13587 default:
13588 LOGE("HDR mode %d not supported", sensor_hdr);
13589 rc = BAD_VALUE;
13590 break;
13591 }
13592
13593 if(isSupported) {
13594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13595 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13596 rc = BAD_VALUE;
13597 } else {
13598 if(!isVideoHdrEnable)
13599 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013600 }
13601 }
13602 return rc;
13603}
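/* Example (illustrative): forcing staggered sensor HDR through the same property
 * this function reads. The value 3 corresponds to CAM_SENSOR_HDR_STAGGERED (the
 * _LE_CAMERA_ default above); the request only takes effect when the capability
 * mask advertises CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR for this camera.
 *
 *   adb shell setprop persist.camera.sensor.hdr 3
 */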
13604
13605/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013606 * FUNCTION : needRotationReprocess
13607 *
13608 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13609 *
13610 * PARAMETERS : none
13611 *
13612 * RETURN : true: needed
13613 * false: no need
13614 *==========================================================================*/
13615bool QCamera3HardwareInterface::needRotationReprocess()
13616{
13617 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13618 // current rotation is not zero, and pp has the capability to process rotation
13619 LOGH("need do reprocess for rotation");
13620 return true;
13621 }
13622
13623 return false;
13624}
13625
13626/*===========================================================================
13627 * FUNCTION : needReprocess
13628 *
13629 * DESCRIPTION: if reprocess is needed
13630 *
13631 * PARAMETERS : none
13632 *
13633 * RETURN : true: needed
13634 * false: no need
13635 *==========================================================================*/
13636bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13637{
13638 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13639 // TODO: add for ZSL HDR later
13640 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13641 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13642 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13643 return true;
13644 } else {
13645 LOGH("already post processed frame");
13646 return false;
13647 }
13648 }
13649 return needRotationReprocess();
13650}
13651
13652/*===========================================================================
13653 * FUNCTION : needJpegExifRotation
13654 *
13655 * DESCRIPTION: if rotation from jpeg is needed
13656 *
13657 * PARAMETERS : none
13658 *
13659 * RETURN : true: needed
13660 * false: no need
13661 *==========================================================================*/
13662bool QCamera3HardwareInterface::needJpegExifRotation()
13663{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013664 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013665 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13666 LOGD("Need use Jpeg EXIF Rotation");
13667 return true;
13668 }
13669 return false;
13670}
13671
13672/*===========================================================================
13673 * FUNCTION : addOfflineReprocChannel
13674 *
13675 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13676 * coming from input channel
13677 *
13678 * PARAMETERS :
13679 * @config : reprocess configuration
13680 * @inputChHandle : pointer to the input (source) channel
13681 *
13682 *
13683 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13684 *==========================================================================*/
13685QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13686 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13687{
13688 int32_t rc = NO_ERROR;
13689 QCamera3ReprocessChannel *pChannel = NULL;
13690
13691 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013692 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13693 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013694 if (NULL == pChannel) {
13695 LOGE("no mem for reprocess channel");
13696 return NULL;
13697 }
13698
13699 rc = pChannel->initialize(IS_TYPE_NONE);
13700 if (rc != NO_ERROR) {
13701 LOGE("init reprocess channel failed, ret = %d", rc);
13702 delete pChannel;
13703 return NULL;
13704 }
13705
13706 // pp feature config
13707 cam_pp_feature_config_t pp_config;
13708 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13709
13710 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13711 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13712 & CAM_QCOM_FEATURE_DSDN) {
13713 //Use CPP CDS incase h/w supports it.
13714 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13715 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13716 }
13717 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13718 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13719 }
13720
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013721 if (config.hdr_param.hdr_enable) {
13722 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13723 pp_config.hdr_param = config.hdr_param;
13724 }
13725
13726 if (mForceHdrSnapshot) {
13727 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13728 pp_config.hdr_param.hdr_enable = 1;
13729 pp_config.hdr_param.hdr_need_1x = 0;
13730 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13731 }
13732
Thierry Strudel3d639192016-09-09 11:52:26 -070013733 rc = pChannel->addReprocStreamsFromSource(pp_config,
13734 config,
13735 IS_TYPE_NONE,
13736 mMetadataChannel);
13737
13738 if (rc != NO_ERROR) {
13739 delete pChannel;
13740 return NULL;
13741 }
13742 return pChannel;
13743}
13744
13745/*===========================================================================
13746 * FUNCTION : getMobicatMask
13747 *
13748 * DESCRIPTION: returns mobicat mask
13749 *
13750 * PARAMETERS : none
13751 *
13752 * RETURN : mobicat mask
13753 *
13754 *==========================================================================*/
13755uint8_t QCamera3HardwareInterface::getMobicatMask()
13756{
13757 return m_MobicatMask;
13758}
13759
13760/*===========================================================================
13761 * FUNCTION : setMobicat
13762 *
13763 * DESCRIPTION: set Mobicat on/off.
13764 *
13765 * PARAMETERS :
13766 * @params : none
13767 *
13768 * RETURN : int32_t type of status
13769 * NO_ERROR -- success
13770 * none-zero failure code
13771 *==========================================================================*/
13772int32_t QCamera3HardwareInterface::setMobicat()
13773{
Thierry Strudel3d639192016-09-09 11:52:26 -070013774 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013775
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013776 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013777 tune_cmd_t tune_cmd;
13778 tune_cmd.type = SET_RELOAD_CHROMATIX;
13779 tune_cmd.module = MODULE_ALL;
13780 tune_cmd.value = TRUE;
13781 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13782 CAM_INTF_PARM_SET_VFE_COMMAND,
13783 tune_cmd);
13784
13785 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13786 CAM_INTF_PARM_SET_PP_COMMAND,
13787 tune_cmd);
13788 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013789
13790 return ret;
13791}
13792
13793/*===========================================================================
13794* FUNCTION : getLogLevel
13795*
13796* DESCRIPTION: Reads the log level property into a variable
13797*
13798* PARAMETERS :
13799* None
13800*
13801* RETURN :
13802* None
13803*==========================================================================*/
13804void QCamera3HardwareInterface::getLogLevel()
13805{
13806 char prop[PROPERTY_VALUE_MAX];
13807 uint32_t globalLogLevel = 0;
13808
13809 property_get("persist.camera.hal.debug", prop, "0");
13810 int val = atoi(prop);
13811 if (0 <= val) {
13812 gCamHal3LogLevel = (uint32_t)val;
13813 }
13814
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013815 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013816 gKpiDebugLevel = atoi(prop);
13817
13818 property_get("persist.camera.global.debug", prop, "0");
13819 val = atoi(prop);
13820 if (0 <= val) {
13821 globalLogLevel = (uint32_t)val;
13822 }
13823
13824 /* Highest log level among hal.logs and global.logs is selected */
13825 if (gCamHal3LogLevel < globalLogLevel)
13826 gCamHal3LogLevel = globalLogLevel;
13827
13828 return;
13829}
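/* Example (illustrative): with persist.camera.hal.debug=1 and
 * persist.camera.global.debug=3, the higher of the two wins and gCamHal3LogLevel
 * ends up as 3. Because getLogLevel() is also invoked from dump(), these
 * properties can be changed at runtime and picked up via
 * "adb shell dumpsys media.camera" without restarting the media server. */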
13830
13831/*===========================================================================
13832 * FUNCTION : validateStreamRotations
13833 *
13834 * DESCRIPTION: Check if the rotations requested are supported
13835 *
13836 * PARAMETERS :
13837 * @stream_list : streams to be configured
13838 *
13839 * RETURN : NO_ERROR on success
13840 * -EINVAL on failure
13841 *
13842 *==========================================================================*/
13843int QCamera3HardwareInterface::validateStreamRotations(
13844 camera3_stream_configuration_t *streamList)
13845{
13846 int rc = NO_ERROR;
13847
13848 /*
13849 * Loop through all streams requested in configuration
13850 * Check if unsupported rotations have been requested on any of them
13851 */
13852 for (size_t j = 0; j < streamList->num_streams; j++){
13853 camera3_stream_t *newStream = streamList->streams[j];
13854
Emilian Peev35ceeed2017-06-29 11:58:56 -070013855 switch(newStream->rotation) {
13856 case CAMERA3_STREAM_ROTATION_0:
13857 case CAMERA3_STREAM_ROTATION_90:
13858 case CAMERA3_STREAM_ROTATION_180:
13859 case CAMERA3_STREAM_ROTATION_270:
13860 //Expected values
13861 break;
13862 default:
13863             ALOGE("%s: Error: Unsupported rotation of %d requested for stream "
13864                     "type:%d and stream format:%d", __func__,
13865 newStream->rotation, newStream->stream_type,
13866 newStream->format);
13867 return -EINVAL;
13868 }
13869
Thierry Strudel3d639192016-09-09 11:52:26 -070013870 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13871 bool isImplDef = (newStream->format ==
13872 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13873 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13874 isImplDef);
13875
13876 if (isRotated && (!isImplDef || isZsl)) {
13877             LOGE("Error: Unsupported rotation of %d requested for stream "
13878                     "type:%d and stream format:%d",
13879 newStream->rotation, newStream->stream_type,
13880 newStream->format);
13881 rc = -EINVAL;
13882 break;
13883 }
13884 }
13885
13886 return rc;
13887}
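/* Example (illustrative, compiled out): a configuration this check rejects.
 * Rotation is only honoured on IMPLEMENTATION_DEFINED, non-ZSL output streams,
 * so a BIDIRECTIONAL (ZSL) stream asking for 90-degree rotation fails with
 * -EINVAL even though its format is implementation defined. */
#if 0
camera3_stream_t zslStream = {};
zslStream.stream_type = CAMERA3_STREAM_BIDIRECTIONAL;
zslStream.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
zslStream.rotation    = CAMERA3_STREAM_ROTATION_90;
/* validateStreamRotations() on a stream list containing zslStream returns -EINVAL */
#endif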
13888
13889/*===========================================================================
13890* FUNCTION : getFlashInfo
13891*
13892* DESCRIPTION: Retrieve information about whether the device has a flash.
13893*
13894* PARAMETERS :
13895* @cameraId : Camera id to query
13896* @hasFlash : Boolean indicating whether there is a flash device
13897* associated with given camera
13898* @flashNode : If a flash device exists, this will be its device node.
13899*
13900* RETURN :
13901* None
13902*==========================================================================*/
13903void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13904 bool& hasFlash,
13905 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13906{
13907 cam_capability_t* camCapability = gCamCapability[cameraId];
13908 if (NULL == camCapability) {
13909 hasFlash = false;
13910 flashNode[0] = '\0';
13911 } else {
13912 hasFlash = camCapability->flash_available;
13913 strlcpy(flashNode,
13914 (char*)camCapability->flash_dev_name,
13915 QCAMERA_MAX_FILEPATH_LENGTH);
13916 }
13917}
13918
13919/*===========================================================================
13920* FUNCTION : getEepromVersionInfo
13921*
13922* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13923*
13924* PARAMETERS : None
13925*
13926* RETURN : string describing EEPROM version
13927* "\0" if no such info available
13928*==========================================================================*/
13929const char *QCamera3HardwareInterface::getEepromVersionInfo()
13930{
13931 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13932}
13933
13934/*===========================================================================
13935* FUNCTION : getLdafCalib
13936*
13937* DESCRIPTION: Retrieve Laser AF calibration data
13938*
13939* PARAMETERS : None
13940*
13941* RETURN : Two uint32_t describing laser AF calibration data
13942* NULL if none is available.
13943*==========================================================================*/
13944const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13945{
13946 if (mLdafCalibExist) {
13947 return &mLdafCalib[0];
13948 } else {
13949 return NULL;
13950 }
13951}
13952
13953/*===========================================================================
13954 * FUNCTION : dynamicUpdateMetaStreamInfo
13955 *
13956 * DESCRIPTION: This function:
13957 * (1) stops all the channels
13958 * (2) returns error on pending requests and buffers
13959 * (3) sends metastream_info in setparams
13960 * (4) starts all channels
13961 * This is useful when sensor has to be restarted to apply any
13962 * settings such as frame rate from a different sensor mode
13963 *
13964 * PARAMETERS : None
13965 *
13966 * RETURN : NO_ERROR on success
13967 * Error codes on failure
13968 *
13969 *==========================================================================*/
13970int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13971{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013972 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013973 int rc = NO_ERROR;
13974
13975 LOGD("E");
13976
13977 rc = stopAllChannels();
13978 if (rc < 0) {
13979 LOGE("stopAllChannels failed");
13980 return rc;
13981 }
13982
13983 rc = notifyErrorForPendingRequests();
13984 if (rc < 0) {
13985 LOGE("notifyErrorForPendingRequests failed");
13986 return rc;
13987 }
13988
13989 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13990 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13991                 " Format:%d",
13992 mStreamConfigInfo.type[i],
13993 mStreamConfigInfo.stream_sizes[i].width,
13994 mStreamConfigInfo.stream_sizes[i].height,
13995 mStreamConfigInfo.postprocess_mask[i],
13996 mStreamConfigInfo.format[i]);
13997 }
13998
13999 /* Send meta stream info once again so that ISP can start */
14000 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14001 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14002 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14003 mParameters);
14004 if (rc < 0) {
14005 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14006 }
14007
14008 rc = startAllChannels();
14009 if (rc < 0) {
14010 LOGE("startAllChannels failed");
14011 return rc;
14012 }
14013
14014 LOGD("X");
14015 return rc;
14016}
14017
14018/*===========================================================================
14019 * FUNCTION : stopAllChannels
14020 *
14021 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14022 *
14023 * PARAMETERS : None
14024 *
14025 * RETURN : NO_ERROR on success
14026 * Error codes on failure
14027 *
14028 *==========================================================================*/
14029int32_t QCamera3HardwareInterface::stopAllChannels()
14030{
14031 int32_t rc = NO_ERROR;
14032
14033 LOGD("Stopping all channels");
14034 // Stop the Streams/Channels
14035 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14036 it != mStreamInfo.end(); it++) {
14037 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14038 if (channel) {
14039 channel->stop();
14040 }
14041 (*it)->status = INVALID;
14042 }
14043
14044 if (mSupportChannel) {
14045 mSupportChannel->stop();
14046 }
14047 if (mAnalysisChannel) {
14048 mAnalysisChannel->stop();
14049 }
14050 if (mRawDumpChannel) {
14051 mRawDumpChannel->stop();
14052 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014053 if (mHdrPlusRawSrcChannel) {
14054 mHdrPlusRawSrcChannel->stop();
14055 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014056 if (mMetadataChannel) {
14057 /* If content of mStreamInfo is not 0, there is metadata stream */
14058 mMetadataChannel->stop();
14059 }
14060
14061 LOGD("All channels stopped");
14062 return rc;
14063}
14064
14065/*===========================================================================
14066 * FUNCTION : startAllChannels
14067 *
14068 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14069 *
14070 * PARAMETERS : None
14071 *
14072 * RETURN : NO_ERROR on success
14073 * Error codes on failure
14074 *
14075 *==========================================================================*/
14076int32_t QCamera3HardwareInterface::startAllChannels()
14077{
14078 int32_t rc = NO_ERROR;
14079
14080 LOGD("Start all channels ");
14081 // Start the Streams/Channels
14082 if (mMetadataChannel) {
14083 /* If content of mStreamInfo is not 0, there is metadata stream */
14084 rc = mMetadataChannel->start();
14085 if (rc < 0) {
14086 LOGE("META channel start failed");
14087 return rc;
14088 }
14089 }
14090 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14091 it != mStreamInfo.end(); it++) {
14092 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14093 if (channel) {
14094 rc = channel->start();
14095 if (rc < 0) {
14096 LOGE("channel start failed");
14097 return rc;
14098 }
14099 }
14100 }
14101 if (mAnalysisChannel) {
14102 mAnalysisChannel->start();
14103 }
14104 if (mSupportChannel) {
14105 rc = mSupportChannel->start();
14106 if (rc < 0) {
14107 LOGE("Support channel start failed");
14108 return rc;
14109 }
14110 }
14111 if (mRawDumpChannel) {
14112 rc = mRawDumpChannel->start();
14113 if (rc < 0) {
14114 LOGE("RAW dump channel start failed");
14115 return rc;
14116 }
14117 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014118 if (mHdrPlusRawSrcChannel) {
14119 rc = mHdrPlusRawSrcChannel->start();
14120 if (rc < 0) {
14121 LOGE("HDR+ RAW channel start failed");
14122 return rc;
14123 }
14124 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014125
14126 LOGD("All channels started");
14127 return rc;
14128}
14129
14130/*===========================================================================
14131 * FUNCTION : notifyErrorForPendingRequests
14132 *
14133 * DESCRIPTION: This function sends error for all the pending requests/buffers
14134 *
14135 * PARAMETERS : None
14136 *
14137 * RETURN : Error codes
14138 * NO_ERROR on success
14139 *
14140 *==========================================================================*/
14141int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14142{
Emilian Peev7650c122017-01-19 08:24:33 -080014143 notifyErrorFoPendingDepthData(mDepthChannel);
14144
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014145 auto pendingRequest = mPendingRequestsList.begin();
14146 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014147
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014148 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14149 // buffers (for which buffers aren't sent yet).
14150 while (pendingRequest != mPendingRequestsList.end() ||
14151 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14152 if (pendingRequest == mPendingRequestsList.end() ||
14153 pendingBuffer->frame_number < pendingRequest->frame_number) {
14154 // If metadata for this frame was sent, notify about a buffer error and returns buffers
14155 // with error.
14156 for (auto &info : pendingBuffer->mPendingBufferList) {
14157 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014158 camera3_notify_msg_t notify_msg;
14159 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14160 notify_msg.type = CAMERA3_MSG_ERROR;
14161 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014162 notify_msg.message.error.error_stream = info.stream;
14163 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014164 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014165
14166 camera3_stream_buffer_t buffer = {};
14167 buffer.acquire_fence = -1;
14168 buffer.release_fence = -1;
14169 buffer.buffer = info.buffer;
14170 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14171 buffer.stream = info.stream;
14172 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014173 }
14174
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014175 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14176 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14177 pendingBuffer->frame_number > pendingRequest->frame_number) {
14178 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014179 camera3_notify_msg_t notify_msg;
14180 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14181 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014182 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14183 notify_msg.message.error.error_stream = nullptr;
14184 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014185 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014186
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014187 if (pendingRequest->input_buffer != nullptr) {
14188 camera3_capture_result result = {};
14189 result.frame_number = pendingRequest->frame_number;
14190 result.result = nullptr;
14191 result.input_buffer = pendingRequest->input_buffer;
14192 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014193 }
14194
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014195 mShutterDispatcher.clear(pendingRequest->frame_number);
14196 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14197 } else {
14198 // If both buffers and result metadata weren't sent yet, notify about a request error
14199 // and return buffers with error.
14200 for (auto &info : pendingBuffer->mPendingBufferList) {
14201 camera3_notify_msg_t notify_msg;
14202 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14203 notify_msg.type = CAMERA3_MSG_ERROR;
14204 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14205 notify_msg.message.error.error_stream = info.stream;
14206 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14207 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014208
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014209 camera3_stream_buffer_t buffer = {};
14210 buffer.acquire_fence = -1;
14211 buffer.release_fence = -1;
14212 buffer.buffer = info.buffer;
14213 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14214 buffer.stream = info.stream;
14215 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14216 }
14217
14218 if (pendingRequest->input_buffer != nullptr) {
14219 camera3_capture_result result = {};
14220 result.frame_number = pendingRequest->frame_number;
14221 result.result = nullptr;
14222 result.input_buffer = pendingRequest->input_buffer;
14223 orchestrateResult(&result);
14224 }
14225
14226 mShutterDispatcher.clear(pendingRequest->frame_number);
14227 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14228 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014229 }
14230 }
14231
14232 /* Reset pending frame Drop list and requests list */
14233 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014234 mShutterDispatcher.clear();
14235 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014236 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014237 mExpectedFrameDuration = 0;
14238 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014239 LOGH("Cleared all the pending buffers ");
14240
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014241 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014242}
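/* Worked example (illustrative): suppose frames 10-12 are outstanding when this
 * runs. Frame 10 already delivered its result metadata, so only ERROR_BUFFER
 * notifications go out and its buffers are returned with
 * CAMERA3_BUFFER_STATUS_ERROR; frame 11 already returned all of its buffers, so
 * a single ERROR_RESULT is sent; frame 12 has delivered neither, so
 * ERROR_REQUEST is signalled and all of its pending buffers come back with
 * CAMERA3_BUFFER_STATUS_ERROR. */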
14243
14244bool QCamera3HardwareInterface::isOnEncoder(
14245 const cam_dimension_t max_viewfinder_size,
14246 uint32_t width, uint32_t height)
14247{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014248 return ((width > (uint32_t)max_viewfinder_size.width) ||
14249 (height > (uint32_t)max_viewfinder_size.height) ||
14250 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14251 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014252}
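/* Example (illustrative): with a 1920x1080 max viewfinder size, a 4000x3000
 * snapshot stream lands on the encoder path (it exceeds the viewfinder in either
 * dimension), as does any stream wider than 3840 or taller than 2160, while a
 * 1280x720 preview stream does not. */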
14253
14254/*===========================================================================
14255 * FUNCTION : setBundleInfo
14256 *
14257 * DESCRIPTION: Set bundle info for all streams that are bundle.
14258 *
14259 * PARAMETERS : None
14260 *
14261 * RETURN : NO_ERROR on success
14262 * Error codes on failure
14263 *==========================================================================*/
14264int32_t QCamera3HardwareInterface::setBundleInfo()
14265{
14266 int32_t rc = NO_ERROR;
14267
14268 if (mChannelHandle) {
14269 cam_bundle_config_t bundleInfo;
14270 memset(&bundleInfo, 0, sizeof(bundleInfo));
14271 rc = mCameraHandle->ops->get_bundle_info(
14272 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14273 if (rc != NO_ERROR) {
14274 LOGE("get_bundle_info failed");
14275 return rc;
14276 }
14277 if (mAnalysisChannel) {
14278 mAnalysisChannel->setBundleInfo(bundleInfo);
14279 }
14280 if (mSupportChannel) {
14281 mSupportChannel->setBundleInfo(bundleInfo);
14282 }
14283 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14284 it != mStreamInfo.end(); it++) {
14285 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14286 channel->setBundleInfo(bundleInfo);
14287 }
14288 if (mRawDumpChannel) {
14289 mRawDumpChannel->setBundleInfo(bundleInfo);
14290 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014291 if (mHdrPlusRawSrcChannel) {
14292 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14293 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014294 }
14295
14296 return rc;
14297}
14298
14299/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014300 * FUNCTION : setInstantAEC
14301 *
14302 * DESCRIPTION: Set Instant AEC related params.
14303 *
14304 * PARAMETERS :
14305 * @meta: CameraMetadata reference
14306 *
14307 * RETURN : NO_ERROR on success
14308 * Error codes on failure
14309 *==========================================================================*/
14310int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14311{
14312 int32_t rc = NO_ERROR;
14313 uint8_t val = 0;
14314 char prop[PROPERTY_VALUE_MAX];
14315
14316 // First try to configure instant AEC from framework metadata
14317 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14318 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14319 }
14320
14321 // If framework did not set this value, try to read from set prop.
14322 if (val == 0) {
14323 memset(prop, 0, sizeof(prop));
14324 property_get("persist.camera.instant.aec", prop, "0");
14325 val = (uint8_t)atoi(prop);
14326 }
14327
14328 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14329 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14330 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14331 mInstantAEC = val;
14332 mInstantAECSettledFrameNumber = 0;
14333 mInstantAecFrameIdxCount = 0;
14334 LOGH("instantAEC value set %d",val);
14335 if (mInstantAEC) {
14336 memset(prop, 0, sizeof(prop));
14337 property_get("persist.camera.ae.instant.bound", prop, "10");
14338 int32_t aec_frame_skip_cnt = atoi(prop);
14339 if (aec_frame_skip_cnt >= 0) {
14340 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14341 } else {
14342 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14343 rc = BAD_VALUE;
14344 }
14345 }
14346 } else {
14347 LOGE("Bad instant aec value set %d", val);
14348 rc = BAD_VALUE;
14349 }
14350 return rc;
14351}
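/* Example (illustrative): enabling instant AEC from the shell when the framework
 * does not supply QCAMERA3_INSTANT_AEC_MODE. The value must lie inside
 * [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX); 1 is assumed here to map
 * to normal convergence. The display-frame skip bound defaults to 10 frames via
 * persist.camera.ae.instant.bound.
 *
 *   adb shell setprop persist.camera.instant.aec 1
 *   adb shell setprop persist.camera.ae.instant.bound 10
 */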
14352
14353/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014354 * FUNCTION : get_num_overall_buffers
14355 *
14356 * DESCRIPTION: Get the total number of pending buffers across all requests.
14357 *
14358 * PARAMETERS : None
14359 *
14360 * RETURN : Number of overall pending buffers
14361 *
14362 *==========================================================================*/
14363uint32_t PendingBuffersMap::get_num_overall_buffers()
14364{
14365 uint32_t sum_buffers = 0;
14366 for (auto &req : mPendingBuffersInRequest) {
14367 sum_buffers += req.mPendingBufferList.size();
14368 }
14369 return sum_buffers;
14370}
14371
14372/*===========================================================================
14373 * FUNCTION : removeBuf
14374 *
14375 * DESCRIPTION: Remove a matching buffer from tracker.
14376 *
14377 * PARAMETERS : @buffer: image buffer for the callback
14378 *
14379 * RETURN : None
14380 *
14381 *==========================================================================*/
14382void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14383{
14384 bool buffer_found = false;
14385 for (auto req = mPendingBuffersInRequest.begin();
14386 req != mPendingBuffersInRequest.end(); req++) {
14387 for (auto k = req->mPendingBufferList.begin();
14388 k != req->mPendingBufferList.end(); k++ ) {
14389 if (k->buffer == buffer) {
14390 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14391 req->frame_number, buffer);
14392 k = req->mPendingBufferList.erase(k);
14393 if (req->mPendingBufferList.empty()) {
14394 // Remove this request from Map
14395 req = mPendingBuffersInRequest.erase(req);
14396 }
14397 buffer_found = true;
14398 break;
14399 }
14400 }
14401 if (buffer_found) {
14402 break;
14403 }
14404 }
14405 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14406 get_num_overall_buffers());
14407}
14408
14409/*===========================================================================
14410 * FUNCTION   : getBufErrStatus
14411 *
14412 * DESCRIPTION: get buffer error status
14413 *
14414 * PARAMETERS : @buffer: buffer handle
14415 *
14416 * RETURN : Error status
14417 *
14418 *==========================================================================*/
14419int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14420{
14421 for (auto& req : mPendingBuffersInRequest) {
14422 for (auto& k : req.mPendingBufferList) {
14423 if (k.buffer == buffer)
14424 return k.bufStatus;
14425 }
14426 }
14427 return CAMERA3_BUFFER_STATUS_OK;
14428}
14429
14430/*===========================================================================
14431 * FUNCTION   : setPAAFSupport
14432 *
14433 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14434 * feature mask according to stream type and filter
14435 * arrangement
14436 *
14437 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14438 * @stream_type: stream type
14439 * @filter_arrangement: filter arrangement
14440 *
14441 * RETURN : None
14442 *==========================================================================*/
14443void QCamera3HardwareInterface::setPAAFSupport(
14444 cam_feature_mask_t& feature_mask,
14445 cam_stream_type_t stream_type,
14446 cam_color_filter_arrangement_t filter_arrangement)
14447{
14448    switch (filter_arrangement) {
14449 case CAM_FILTER_ARRANGEMENT_RGGB:
14450 case CAM_FILTER_ARRANGEMENT_GRBG:
14451 case CAM_FILTER_ARRANGEMENT_GBRG:
14452 case CAM_FILTER_ARRANGEMENT_BGGR:
14453        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14454                (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
14455                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
14456            if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14457                feature_mask |= CAM_QCOM_FEATURE_PAAF;
14458        }
14459 break;
14460 case CAM_FILTER_ARRANGEMENT_Y:
14461 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14462 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14463 }
14464 break;
14465 default:
14466 break;
14467 }
14468    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14469 feature_mask, stream_type, filter_arrangement);
14470
14471
14472}
14473
14474/*===========================================================================
14475* FUNCTION : getSensorMountAngle
14476*
14477* DESCRIPTION: Retrieve sensor mount angle
14478*
14479* PARAMETERS : None
14480*
14481* RETURN : sensor mount angle in uint32_t
14482*==========================================================================*/
14483uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14484{
14485 return gCamCapability[mCameraId]->sensor_mount_angle;
14486}
14487
14488/*===========================================================================
14489* FUNCTION : getRelatedCalibrationData
14490*
14491* DESCRIPTION: Retrieve related system calibration data
14492*
14493* PARAMETERS : None
14494*
14495* RETURN : Pointer of related system calibration data
14496*==========================================================================*/
14497const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14498{
14499 return (const cam_related_system_calibration_data_t *)
14500 &(gCamCapability[mCameraId]->related_cam_calibration);
14501}
14502
14503/*===========================================================================
14504 * FUNCTION : is60HzZone
14505 *
14506 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14507 *
14508 * PARAMETERS : None
14509 *
14510 * RETURN : True if in 60Hz zone, False otherwise
14511 *==========================================================================*/
14512bool QCamera3HardwareInterface::is60HzZone()
14513{
14514 time_t t = time(NULL);
14515 struct tm lt;
14516
14517 struct tm* r = localtime_r(&t, &lt);
14518
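    // Heuristic based on the local UTC offset: offsets at or below -2 hours (roughly the
    // Americas) and at or above +8 hours are treated as 60Hz regions; also default to 60Hz
    // when the local time cannot be determined.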
14519 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14520 return true;
14521 else
14522 return false;
14523}
14524
14525/*===========================================================================
14526 * FUNCTION : adjustBlackLevelForCFA
14527 *
14528 * DESCRIPTION: Reorder the black level pattern, given in RGGB order, to match the
14529 *              order of the sensor's Bayer CFA (Color Filter Array).
14530 *
14531 * PARAMETERS : @input: black level pattern in the order of RGGB
14532 * @output: black level pattern in the order of CFA
14533 * @color_arrangement: CFA color arrangement
14534 *
14535 * RETURN : None
14536 *==========================================================================*/
14537template<typename T>
14538void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14539 T input[BLACK_LEVEL_PATTERN_CNT],
14540 T output[BLACK_LEVEL_PATTERN_CNT],
14541 cam_color_filter_arrangement_t color_arrangement)
14542{
14543 switch (color_arrangement) {
14544 case CAM_FILTER_ARRANGEMENT_GRBG:
14545 output[0] = input[1];
14546 output[1] = input[0];
14547 output[2] = input[3];
14548 output[3] = input[2];
14549 break;
14550 case CAM_FILTER_ARRANGEMENT_GBRG:
14551 output[0] = input[2];
14552 output[1] = input[3];
14553 output[2] = input[0];
14554 output[3] = input[1];
14555 break;
14556 case CAM_FILTER_ARRANGEMENT_BGGR:
14557 output[0] = input[3];
14558 output[1] = input[2];
14559 output[2] = input[1];
14560 output[3] = input[0];
14561 break;
14562 case CAM_FILTER_ARRANGEMENT_RGGB:
14563 output[0] = input[0];
14564 output[1] = input[1];
14565 output[2] = input[2];
14566 output[3] = input[3];
14567 break;
14568 default:
14569 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14570 break;
14571 }
14572}
14573
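/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Update an HDR+ result metadata with the JPEG and capture
 *              intent settings of the original HDR+ still capture request.
 *
 * PARAMETERS : @resultMetadata: result metadata to update
 *              @settings: HAL metadata of the HDR+ still capture request
 *
 * RETURN     : None
 *==========================================================================*/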
14574void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14575        CameraMetadata &resultMetadata,
14576        std::shared_ptr<metadata_buffer_t> settings)
14577{
14578 if (settings == nullptr) {
14579 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14580 return;
14581 }
14582
14583 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14584 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14585 }
14586
14587 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14588 String8 str((const char *)gps_methods);
14589 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14590 }
14591
14592 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14593 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14594 }
14595
14596 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14597 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14598 }
14599
14600 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14601 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14602 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14603 }
14604
14605 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14606 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14607 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14608 }
14609
14610 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14611 int32_t fwk_thumb_size[2];
14612 fwk_thumb_size[0] = thumb_size->width;
14613 fwk_thumb_size[1] = thumb_size->height;
14614 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14615 }
14616
14617 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14618 uint8_t fwk_intent = intent[0];
14619 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14620 }
14621}
14622
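/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Check if a capture request qualifies for HDR+ (high quality
 *              noise reduction and edge modes, single JPEG output) and, if
 *              so, get a YUV buffer from the pic channel and submit the
 *              request to the HDR+ service.
 *
 * PARAMETERS : @hdrPlusRequest: pending HDR+ request to fill in
 *              @request: framework capture request
 *              @metadata: framework capture request settings
 *
 * RETURN     : true if the request was submitted as an HDR+ request
 *              false otherwise
 *==========================================================================*/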
14623bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14624        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14625        const CameraMetadata &metadata)
14626{
14627 if (hdrPlusRequest == nullptr) return false;
14628
14629 // Check noise reduction mode is high quality.
14630 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14631 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14632 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
14633        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is missing or not HQ.",
14634                __FUNCTION__);
14635        return false;
14636 }
14637
14638 // Check edge mode is high quality.
14639 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14640 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14641 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14642 return false;
14643 }
14644
14645 if (request.num_output_buffers != 1 ||
14646 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14647 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
14648        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14649            ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14650                    request.output_buffers[i].stream->width,
14651                    request.output_buffers[i].stream->height,
14652                    request.output_buffers[i].stream->format);
14653        }
14654        return false;
14655    }
14656
14657 // Get a YUV buffer from pic channel.
14658 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14659 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14660 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14661 if (res != OK) {
14662 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14663 __FUNCTION__, strerror(-res), res);
14664 return false;
14665 }
14666
14667 pbcamera::StreamBuffer buffer;
14668 buffer.streamId = kPbYuvOutputStreamId;
14669    buffer.dmaBufFd = yuvBuffer->fd;
14670    buffer.data = yuvBuffer->buffer;
14671 buffer.dataSize = yuvBuffer->frame_len;
14672
14673 pbcamera::CaptureRequest pbRequest;
14674 pbRequest.id = request.frame_number;
14675 pbRequest.outputBuffers.push_back(buffer);
14676
14677 // Submit an HDR+ capture request to HDR+ service.
14678    res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
14679    if (res != OK) {
14680 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14681 strerror(-res), res);
14682 return false;
14683 }
14684
14685 hdrPlusRequest->yuvBuffer = yuvBuffer;
14686 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14687
14688 return true;
14689}
14690
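/*===========================================================================
 * FUNCTION   : openHdrPlusClientAsyncLocked
 *
 * DESCRIPTION: Ask the Easel manager client to open an HDR+ client
 *              asynchronously, if it is not already opened or being opened.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/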
14691status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14692{
14693    if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14694        return OK;
14695    }
14696
14697    status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
14698    if (res != OK) {
14699 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14700 strerror(-res), res);
14701 return res;
14702 }
14703 gHdrPlusClientOpening = true;
14704
14705 return OK;
14706}
14707
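/*===========================================================================
 * FUNCTION   : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Enable HDR+ mode: open the HDR+ client if necessary, configure
 *              HDR+ streams, and enable ZSL HDR+ mode so Easel starts
 *              capturing ZSL raw buffers.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/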
14708status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14709{
14710    status_t res;
14711
14712    if (mHdrPlusModeEnabled) {
14713        return OK;
14714    }
14715
14716    // Check if gHdrPlusClient is opened or being opened.
14717    if (gHdrPlusClient == nullptr) {
14718        if (gHdrPlusClientOpening) {
14719            // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14720            return OK;
14721        }
14722
14723        res = openHdrPlusClientAsyncLocked();
14724        if (res != OK) {
14725            ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14726                    strerror(-res), res);
14727            return res;
14728        }
14729
14730        // When opening HDR+ client completes, HDR+ mode will be enabled.
14731        return OK;
14732
14733    }
14734
14735 // Configure stream for HDR+.
14736 res = configureHdrPlusStreamsLocked();
14737 if (res != OK) {
14738 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
14739        return res;
14740 }
14741
14742 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14743 res = gHdrPlusClient->setZslHdrPlusMode(true);
14744 if (res != OK) {
14745 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14746        return res;
14747 }
14748
14749 mHdrPlusModeEnabled = true;
14750 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14751
14752 return OK;
14753}
14754
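/*===========================================================================
 * FUNCTION   : disableHdrPlusModeLocked
 *
 * DESCRIPTION: Disable ZSL HDR+ mode and close the HDR+ client so Easel can
 *              enter low power mode.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/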
14755void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14756{
14757    // Disable HDR+ mode.
14758    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
14759        status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14760        if (res != OK) {
14761            ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14762        }
14763
14764        // Close HDR+ client so Easel can enter low power mode.
14765        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
14766        gHdrPlusClient = nullptr;
14767    }
14768
14769    mHdrPlusModeEnabled = false;
14770    gHdrPlusClientOpening = false;
14771    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14772}
14773
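/*===========================================================================
 * FUNCTION   : configureHdrPlusStreamsLocked
 *
 * DESCRIPTION: Configure HDR+ client streams based on the current input
 *              (HAL RAW source channel or sensor MIPI) and YUV output
 *              (picture channel) configuration.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/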
14774status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
14775{
14776 pbcamera::InputConfiguration inputConfig;
14777 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14778 status_t res = OK;
14779
14780 // Configure HDR+ client streams.
14781 // Get input config.
14782 if (mHdrPlusRawSrcChannel) {
14783 // HDR+ input buffers will be provided by HAL.
14784 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14785 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14786 if (res != OK) {
14787            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14788 __FUNCTION__, strerror(-res), res);
14789 return res;
14790 }
14791
14792 inputConfig.isSensorInput = false;
14793 } else {
14794 // Sensor MIPI will send data to Easel.
14795 inputConfig.isSensorInput = true;
14796        inputConfig.sensorMode.cameraId = mCameraId;
14797        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14798 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14799 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14800 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14801 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14802        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
14803        if (mSensorModeInfo.num_raw_bits != 10) {
14804 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14805 mSensorModeInfo.num_raw_bits);
14806 return BAD_VALUE;
14807 }
14808
14809 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
14810    }
14811
14812 // Get output configurations.
14813 // Easel may need to output RAW16 buffers if mRawChannel was created.
14814    // TODO: handle RAW16 outputs.
14815
14816 // Easel may need to output YUV output buffers if mPictureChannel was created.
14817 pbcamera::StreamConfiguration yuvOutputConfig;
14818 if (mPictureChannel != nullptr) {
14819 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14820 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14821 if (res != OK) {
14822            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14823 __FUNCTION__, strerror(-res), res);
14824
14825 return res;
14826 }
14827
14828 outputStreamConfigs.push_back(yuvOutputConfig);
14829 }
14830
14831 // TODO: consider other channels for YUV output buffers.
14832
14833    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
14834    if (res != OK) {
14835        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14836 strerror(-res), res);
14837 return res;
14838 }
14839
14840 return OK;
14841}
14842
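/*===========================================================================
 * FUNCTION   : onEaselFatalError
 *
 * DESCRIPTION: Callback invoked when Easel encounters a fatal error. Moves
 *              the HAL to the error state and notifies the framework of the
 *              device error.
 *
 * PARAMETERS : @errMsg: message describing the fatal error
 *
 * RETURN     : None
 *==========================================================================*/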
14843void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
14844{
14845 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
14846 // Set HAL state to error.
14847 pthread_mutex_lock(&mMutex);
14848 mState = ERROR;
14849 pthread_mutex_unlock(&mMutex);
14850
14851    handleCameraDeviceError(/*stopChannelImmediately*/true);
14852}
14853
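/*===========================================================================
 * FUNCTION   : onOpened
 *
 * DESCRIPTION: Callback invoked when the HDR+ client has been opened
 *              asynchronously. Stores the client, sets its static metadata
 *              and enables HDR+ mode.
 *
 * PARAMETERS : @client: the opened HDR+ client
 *
 * RETURN     : None
 *==========================================================================*/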
14854void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14855{
14856    if (client == nullptr) {
14857        ALOGE("%s: Opened client is null.", __FUNCTION__);
14858        return;
14859    }
14860
14861    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
14862    ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14863
14864    Mutex::Autolock l(gHdrPlusClientLock);
14865    if (!gHdrPlusClientOpening) {
14866 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14867 return;
14868 }
14869
14870    gHdrPlusClient = std::move(client);
14871 gHdrPlusClientOpening = false;
14872
14873 // Set static metadata.
14874 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14875 if (res != OK) {
14876 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14877 __FUNCTION__, strerror(-res), res);
14878        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
14879        gHdrPlusClient = nullptr;
14880 return;
14881 }
14882
14883 // Enable HDR+ mode.
14884 res = enableHdrPlusModeLocked();
14885 if (res != OK) {
14886 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14887 }
14888}
14889
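/*===========================================================================
 * FUNCTION   : onOpenFailed
 *
 * DESCRIPTION: Callback invoked when opening the HDR+ client asynchronously
 *              failed.
 *
 * PARAMETERS : @err: error code of the failure
 *
 * RETURN     : None
 *==========================================================================*/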
14890void QCamera3HardwareInterface::onOpenFailed(status_t err)
14891{
14892    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14893 Mutex::Autolock l(gHdrPlusClientLock);
14894 gHdrPlusClientOpening = false;
14895}
14896
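/*===========================================================================
 * FUNCTION   : onFatalError
 *
 * DESCRIPTION: Callback invoked when the HDR+ client encounters a fatal
 *              error. Moves the HAL to the error state and notifies the
 *              framework of the device error.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/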
14897void QCamera3HardwareInterface::onFatalError()
14898{
14899 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14900
14901 // Set HAL state to error.
14902 pthread_mutex_lock(&mMutex);
14903 mState = ERROR;
14904 pthread_mutex_unlock(&mMutex);
14905
14906    handleCameraDeviceError(/*stopChannelImmediately*/true);
14907}
14908
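/*===========================================================================
 * FUNCTION   : onCaptureResult
 *
 * DESCRIPTION: Callback invoked when an HDR+ capture result is ready.
 *              Updates the result metadata with the original request
 *              settings, returns the YUV buffer to the pic channel for JPEG
 *              encoding, and dispatches the shutter and result metadata to
 *              the framework.
 *
 * PARAMETERS : @result: HDR+ capture result
 *              @resultMetadata: capture result metadata
 *
 * RETURN     : None
 *==========================================================================*/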
14909void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14910        const camera_metadata_t &resultMetadata)
14911{
14912    if (result != nullptr) {
14913 if (result->outputBuffers.size() != 1) {
14914 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14915 result->outputBuffers.size());
14916 return;
14917 }
14918
14919 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14920 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14921 result->outputBuffers[0].streamId);
14922 return;
14923 }
14924
14925        // Find the pending HDR+ request.
14926        HdrPlusPendingRequest pendingRequest;
14927 {
14928 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14929 auto req = mHdrPlusPendingRequests.find(result->requestId);
14930 pendingRequest = req->second;
14931 }
14932
14933        // Update the result metadata with the settings of the HDR+ still capture request because
14934 // the result metadata belongs to a ZSL buffer.
14935 CameraMetadata metadata;
14936 metadata = &resultMetadata;
14937 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14938 camera_metadata_t* updatedResultMetadata = metadata.release();
14939
14940 QCamera3PicChannel *picChannel =
14941 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14942
14943        // Check if dumping HDR+ YUV output is enabled.
14944 char prop[PROPERTY_VALUE_MAX];
14945 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14946 bool dumpYuvOutput = atoi(prop);
14947
14948 if (dumpYuvOutput) {
14949            // Dump yuv buffer to a ppm file.
14950 pbcamera::StreamConfiguration outputConfig;
14951 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14952 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14953 if (rc == OK) {
14954 char buf[FILENAME_MAX] = {};
14955 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14956 result->requestId, result->outputBuffers[0].streamId,
14957 outputConfig.image.width, outputConfig.image.height);
14958
14959 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14960 } else {
14961 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14962 __FUNCTION__, strerror(-rc), rc);
14963 }
14964 }
14965
14966        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14967 auto halMetadata = std::make_shared<metadata_buffer_t>();
14968 clear_metadata_buffer(halMetadata.get());
14969
14970 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14971 // encoding.
14972 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14973 halStreamId, /*minFrameDuration*/0);
14974 if (res == OK) {
14975 // Return the buffer to pic channel for encoding.
14976 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14977 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14978 halMetadata);
14979 } else {
14980 // Return the buffer without encoding.
14981 // TODO: This should not happen but we may want to report an error buffer to camera
14982 // service.
14983 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14984 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14985 strerror(-res), res);
14986 }
14987
14988        // Find the timestamp
14989 camera_metadata_ro_entry_t entry;
14990 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14991 ANDROID_SENSOR_TIMESTAMP, &entry);
14992 if (res != OK) {
14993 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14994 __FUNCTION__, result->requestId, strerror(-res), res);
14995 } else {
14996 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14997 }
14998
14999        // Send HDR+ metadata to framework.
15000 {
15001 pthread_mutex_lock(&mMutex);
15002
15003            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15004            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
15005            pthread_mutex_unlock(&mMutex);
15006 }
15007
15008 // Remove the HDR+ pending request.
15009 {
15010 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15011 auto req = mHdrPlusPendingRequests.find(result->requestId);
15012 mHdrPlusPendingRequests.erase(req);
15013 }
15014 }
15015}
15016
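/*===========================================================================
 * FUNCTION   : onFailedCaptureResult
 *
 * DESCRIPTION: Callback invoked when an HDR+ capture request failed. Returns
 *              the YUV buffer to the pic channel, sends buffer error
 *              notifications for the pending buffers of the failed frame and
 *              removes the pending request.
 *
 * PARAMETERS : @failedResult: the failed HDR+ capture result
 *
 * RETURN     : None
 *==========================================================================*/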
15017void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15018{
15019 if (failedResult == nullptr) {
15020 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15021 return;
15022 }
15023
15024    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
15025
15026    // Remove the pending HDR+ request.
15027 {
15028 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15029 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15030
15031 // Return the buffer to pic channel.
15032 QCamera3PicChannel *picChannel =
15033 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15034 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15035
15036 mHdrPlusPendingRequests.erase(pendingRequest);
15037 }
15038
15039 pthread_mutex_lock(&mMutex);
15040
15041 // Find the pending buffers.
15042 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15043 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15044 if (pendingBuffers->frame_number == failedResult->requestId) {
15045 break;
15046 }
15047 pendingBuffers++;
15048 }
15049
15050 // Send out buffer errors for the pending buffers.
15051 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15052 std::vector<camera3_stream_buffer_t> streamBuffers;
15053 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15054 // Prepare a stream buffer.
15055 camera3_stream_buffer_t streamBuffer = {};
15056 streamBuffer.stream = buffer.stream;
15057 streamBuffer.buffer = buffer.buffer;
15058 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15059 streamBuffer.acquire_fence = -1;
15060 streamBuffer.release_fence = -1;
15061
15062 streamBuffers.push_back(streamBuffer);
15063
15064 // Send out error buffer event.
15065 camera3_notify_msg_t notify_msg = {};
15066 notify_msg.type = CAMERA3_MSG_ERROR;
15067 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15068 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15069 notify_msg.message.error.error_stream = buffer.stream;
15070
15071 orchestrateNotify(&notify_msg);
15072 }
15073
15074 camera3_capture_result_t result = {};
15075 result.frame_number = pendingBuffers->frame_number;
15076 result.num_output_buffers = streamBuffers.size();
15077 result.output_buffers = &streamBuffers[0];
15078
15079 // Send out result with buffer errors.
15080 orchestrateResult(&result);
15081
15082 // Remove pending buffers.
15083 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15084 }
15085
15086 // Remove pending request.
15087 auto halRequest = mPendingRequestsList.begin();
15088 while (halRequest != mPendingRequestsList.end()) {
15089 if (halRequest->frame_number == failedResult->requestId) {
15090 mPendingRequestsList.erase(halRequest);
15091 break;
15092 }
15093 halRequest++;
15094 }
15095
15096 pthread_mutex_unlock(&mMutex);
15097}
15098
15099
15100ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15101 mParent(parent) {}
15102
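/*===========================================================================
 * FUNCTION   : expectShutter
 *
 * DESCRIPTION: Register a frame number for which a shutter notification is
 *              expected, so shutters can later be dispatched in order.
 *
 * PARAMETERS : @frameNumber: frame number of the request
 *              @isReprocess: whether the request is a reprocess request
 *
 * RETURN     : None
 *==========================================================================*/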
15103void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
15104{
15105    std::lock_guard<std::mutex> lock(mLock);
15106
15107 if (isReprocess) {
15108 mReprocessShutters.emplace(frameNumber, Shutter());
15109 } else {
15110 mShutters.emplace(frameNumber, Shutter());
15111 }
15112}
15113
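/*===========================================================================
 * FUNCTION   : markShutterReady
 *
 * DESCRIPTION: Mark the shutter for a frame number as ready and send out all
 *              consecutive ready shutters, in frame number order, to the
 *              framework.
 *
 * PARAMETERS : @frameNumber: frame number of the request
 *              @timestamp: shutter timestamp in nanoseconds
 *
 * RETURN     : None
 *==========================================================================*/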
15114void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15115{
15116 std::lock_guard<std::mutex> lock(mLock);
15117
15118    std::map<uint32_t, Shutter> *shutters = nullptr;
15119
15120 // Find the shutter entry.
15121    auto shutter = mShutters.find(frameNumber);
15122 if (shutter == mShutters.end()) {
15123        shutter = mReprocessShutters.find(frameNumber);
15124 if (shutter == mReprocessShutters.end()) {
15125 // Shutter was already sent.
15126 return;
15127 }
15128 shutters = &mReprocessShutters;
15129 } else {
15130 shutters = &mShutters;
15131    }
15132
15133    // Make this frame's shutter ready.
15134    shutter->second.ready = true;
15135 shutter->second.timestamp = timestamp;
15136
15137    // Iterate through the shutters and send them out until reaching one that's not ready yet.
15138    shutter = shutters->begin();
15139 while (shutter != shutters->end()) {
15140        if (!shutter->second.ready) {
15141 // If this shutter is not ready, the following shutters can't be sent.
15142 break;
15143 }
15144
15145 camera3_notify_msg_t msg = {};
15146 msg.type = CAMERA3_MSG_SHUTTER;
15147 msg.message.shutter.frame_number = shutter->first;
15148 msg.message.shutter.timestamp = shutter->second.timestamp;
15149 mParent->orchestrateNotify(&msg);
15150
15151        shutter = shutters->erase(shutter);
15152    }
15153}
15154
15155void ShutterDispatcher::clear(uint32_t frameNumber)
15156{
15157 std::lock_guard<std::mutex> lock(mLock);
15158 mShutters.erase(frameNumber);
15159    mReprocessShutters.erase(frameNumber);
15160}
15161
15162void ShutterDispatcher::clear()
15163{
15164 std::lock_guard<std::mutex> lock(mLock);
15165
15166 // Log errors for stale shutters.
15167 for (auto &shutter : mShutters) {
15168 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15169 __FUNCTION__, shutter.first, shutter.second.ready,
15170 shutter.second.timestamp);
15171 }
15172
15173 // Log errors for stale reprocess shutters.
15174 for (auto &shutter : mReprocessShutters) {
15175 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15176 __FUNCTION__, shutter.first, shutter.second.ready,
15177 shutter.second.timestamp);
15178 }
15179
15180    mShutters.clear();
15181    mReprocessShutters.clear();
15182}
15183
15184OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15185 mParent(parent) {}
15186
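/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset the dispatcher and create an empty frame-number-to-
 *              buffer map for each configured stream.
 *
 * PARAMETERS : @streamList: list of configured streams
 *
 * RETURN     : OK on success
 *              -EINVAL if streamList is null
 *==========================================================================*/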
15187status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15188{
15189 std::lock_guard<std::mutex> lock(mLock);
15190 mStreamBuffers.clear();
15191 if (!streamList) {
15192 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15193 return -EINVAL;
15194 }
15195
15196 // Create a "frame-number -> buffer" map for each stream.
15197 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15198 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15199 }
15200
15201 return OK;
15202}
15203
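/*===========================================================================
 * FUNCTION   : expectBuffer
 *
 * DESCRIPTION: Register a buffer that is expected for a frame number on a
 *              configured stream, so buffers can later be dispatched in
 *              order.
 *
 * PARAMETERS : @frameNumber: frame number of the request
 *              @stream: stream the buffer belongs to
 *
 * RETURN     : OK on success
 *              -EINVAL if the stream was not configured
 *==========================================================================*/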
15204status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15205{
15206 std::lock_guard<std::mutex> lock(mLock);
15207
15208 // Find the "frame-number -> buffer" map for the stream.
15209 auto buffers = mStreamBuffers.find(stream);
15210 if (buffers == mStreamBuffers.end()) {
15211 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15212 return -EINVAL;
15213 }
15214
15215 // Create an unready buffer for this frame number.
15216 buffers->second.emplace(frameNumber, Buffer());
15217 return OK;
15218}
15219
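/*===========================================================================
 * FUNCTION   : markBufferReady
 *
 * DESCRIPTION: Mark a buffer for a frame number as ready and send out all
 *              consecutive ready buffers of that stream, in frame number
 *              order, to the framework.
 *
 * PARAMETERS : @frameNumber: frame number of the request
 *              @buffer: the ready stream buffer
 *
 * RETURN     : None
 *==========================================================================*/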
15220void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15221 const camera3_stream_buffer_t &buffer)
15222{
15223 std::lock_guard<std::mutex> lock(mLock);
15224
15225 // Find the frame number -> buffer map for the stream.
15226 auto buffers = mStreamBuffers.find(buffer.stream);
15227 if (buffers == mStreamBuffers.end()) {
15228 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15229 return;
15230 }
15231
15232    // Find the unready buffer for this frame number and mark it ready.
15233 auto pendingBuffer = buffers->second.find(frameNumber);
15234 if (pendingBuffer == buffers->second.end()) {
15235 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15236 return;
15237 }
15238
15239 pendingBuffer->second.ready = true;
15240 pendingBuffer->second.buffer = buffer;
15241
15242 // Iterate through the buffers and send out buffers until the one that's not ready yet.
15243 pendingBuffer = buffers->second.begin();
15244 while (pendingBuffer != buffers->second.end()) {
15245 if (!pendingBuffer->second.ready) {
15246 // If this buffer is not ready, the following buffers can't be sent.
15247 break;
15248 }
15249
15250 camera3_capture_result_t result = {};
15251 result.frame_number = pendingBuffer->first;
15252 result.num_output_buffers = 1;
15253 result.output_buffers = &pendingBuffer->second.buffer;
15254
15255        // Send out the result with the ready buffer.
15256 mParent->orchestrateResult(&result);
15257
15258 pendingBuffer = buffers->second.erase(pendingBuffer);
15259 }
15260}
15261
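/*===========================================================================
 * FUNCTION   : clear
 *
 * DESCRIPTION: Log and drop any stale pending buffers, and optionally clear
 *              the configured streams as well.
 *
 * PARAMETERS : @clearConfiguredStreams: whether to also clear the configured
 *              stream map
 *
 * RETURN     : None
 *==========================================================================*/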
15262void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15263{
15264 std::lock_guard<std::mutex> lock(mLock);
15265
15266 // Log errors for stale buffers.
15267 for (auto &buffers : mStreamBuffers) {
15268 for (auto &buffer : buffers.second) {
15269 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15270 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15271 }
15272 buffers.second.clear();
15273 }
15274
15275 if (clearConfiguredStreams) {
15276 mStreamBuffers.clear();
15277 }
15278}
15279
15280}; //end namespace qcamera